diff -Nru sosreport-4.4/bin/sos sosreport-4.5.4ubuntu0.20.04.1/bin/sos --- sosreport-4.4/bin/sos 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/bin/sos 2023-05-26 22:32:49.000000000 +0530 @@ -20,5 +20,6 @@ if __name__ == '__main__': sos = SoS(sys.argv[1:]) sos.execute() + os._exit(0) # vim:ts=4 et sw=4 diff -Nru sosreport-4.4/.cirrus.yml sosreport-4.5.4ubuntu0.20.04.1/.cirrus.yml --- sosreport-4.4/.cirrus.yml 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/.cirrus.yml 2023-05-26 22:32:49.000000000 +0530 @@ -3,35 +3,41 @@ # Main environment vars to set for all tasks env: - FEDORA_VER: "35" - FEDORA_PRIOR_VER: "34" - FEDORA_NAME: "fedora-${FEDORA_VER}" - FEDORA_PRIOR_NAME: "fedora-${FEDORA_PRIOR_VER}" + FEDORA_NAME: "fedora-38" + FEDORA_PRIOR_NAME: "fedora-37" + + DEBIAN_NAME: "debian-11" UBUNTU_NAME: "ubuntu-22.04" UBUNTU_PRIOR_NAME: "ubuntu-20.04" + UBUNTU_PRIOR2_NAME: "ubuntu-18.04" CENTOS_9_NAME: "centos-stream-9" CENTOS_8_NAME: "centos-stream-8" CENTOS_PROJECT: "centos-cloud" + DEBIAN_PROJECT: "debian-cloud" FEDORA_PROJECT: "fedora-cloud" SOS_PROJECT: "sos-devel-jobs" UBUNTU_PROJECT: "ubuntu-os-cloud" - # These are generated images pushed to GCP from Red Hat - # FEDORA_PRIOR to be switched to "stock" images from the Fedora Project - # once the F36 image is pushed. 
- FEDORA_PRIOR_IMAGE_NAME: "f${FEDORA_PRIOR_VER}-server-sos-testing" - FOREMAN_CENTOS_IMAGE_NAME: "foreman-25-centos-8-sos-testing" - FOREMAN_DEBIAN_IMAGE_NAME: "foreman-25-debian-10-sos-testing" - # Images exist on GCP already - CENTOS_9_IMAGE_NAME: "centos-stream-9-v20220621" - CENTOS_8_IMAGE_NAME: "centos-stream-8-v20220621" - FEDORA_IMAGE_NAME: "fedora-cloud-base-gcp-${FEDORA_VER}-1-2-x86-64" - UBUNTU_IMAGE_NAME: "ubuntu-2204-jammy-v20220622" - UBUNTU_PRIOR_IMAGE_NAME: "ubuntu-2004-focal-v20220701" + CENTOS_9_IMAGE_NAME: "centos-stream-9-v20221102" + CENTOS_8_IMAGE_NAME: "centos-stream-8-v20230306" + DEBIAN_IMAGE_NAME: "debian-11-bullseye-v20230306" + FEDORA_IMAGE_NAME: "fedora-cloud-base-gcp-38-1-6-x86-64" + FEDORA_PRIOR_IMAGE_NAME: "fedora-cloud-base-gcp-37-1-7-x86-64" + UBUNTU_IMAGE_NAME: "ubuntu-2204-jammy-v20230302" + UBUNTU_PRIOR_IMAGE_NAME: "ubuntu-2004-focal-v20230302" + UBUNTU_PRIOR2_IMAGE_NAME: "ubuntu-1804-bionic-v20230324" + UBUNTU_SNAP_IMAGE_NAME: "ubuntu-2204-jammy-v20230302" + + # Curl-command prefix for downloading task artifacts, simply add the + # the url-encoded task name, artifact name, and path as a suffix. + # This approach originally seen in the podman project. 
+ ARTCURL: >- + curl --fail --location -O + --url https://api.cirrus-ci.com/v1/artifact/build/${CIRRUS_BUILD_ID} # Default task timeout timeout_in: 30m @@ -70,69 +76,120 @@ # Make sure a user can manually build an rpm from the checkout rpm_build_task: alias: "rpm_build" - name: "RPM Build From Checkout" - container: - image: "fedora" - setup_script: dnf -y install rpm-build rpmdevtools gettext python3-devel - main_script: | - rpmdev-setuptree - python3 setup.py sdist - cp dist/sos*.tar.gz ~/rpmbuild/SOURCES - rpmbuild -bs sos.spec - rpmbuild -bb sos.spec - -# Run the stage one (no mocking) tests across all distros on GCP -report_stageone_task: - alias: "stageone_report" - name: "Report Stage One - $BUILD_NAME" + name: "rpm Build From Checkout - ${BUILD_NAME}" gce_instance: &standardvm image_project: "${PROJECT}" image_name: "${VM_IMAGE_NAME}" type: e2-medium matrix: - - env: + - env: ¢os9 PROJECT: ${CENTOS_PROJECT} BUILD_NAME: ${CENTOS_9_NAME} VM_IMAGE_NAME: ${CENTOS_9_IMAGE_NAME} - - env: + - env: ¢os8 PROJECT: ${CENTOS_PROJECT} BUILD_NAME: ${CENTOS_8_NAME} VM_IMAGE_NAME: ${CENTOS_8_IMAGE_NAME} - - env: + - env: &fedora PROJECT: ${FEDORA_PROJECT} BUILD_NAME: ${FEDORA_NAME} VM_IMAGE_NAME: ${FEDORA_IMAGE_NAME} - - env: - PROJECT: ${SOS_PROJECT} + - env: &fedoraprior + PROJECT: ${FEDORA_PROJECT} BUILD_NAME: ${FEDORA_PRIOR_NAME} VM_IMAGE_NAME: ${FEDORA_PRIOR_IMAGE_NAME} - - env: + setup_script: | + dnf clean all + dnf -y install rpm-build rpmdevtools gettext python3-devel + main_script: | + mkdir -p /rpmbuild/{BUILD,BUILDROOT,RPMS,SRPMS,SOURCES} + python3 setup.py sdist + cp dist/sos*.tar.gz /rpmbuild/SOURCES/ + rpmbuild -bs sos.spec + rpmbuild -bb sos.spec + # Retrieving the built rpm in later tasks requires knowing the exact name + # of the file. 
To avoid having to juggle version numbers here, rename it + prep_artifacts_script: mv /rpmbuild/RPMS/noarch/sos-*.rpm ./sos_${BUILD_NAME}.rpm + packages_artifacts: + path: ./sos_${BUILD_NAME}.rpm + type: application/octet-stream + +# Make sure a user can manually build a snap from the checkout +snap_build_task: + alias: "snap_build" + name: "snap Build From Checkout" + gce_instance: + image_project: "${UBUNTU_PROJECT}" + image_name: "${UBUNTU_SNAP_IMAGE_NAME}" + type: e2-medium + setup_script: | + apt update + apt -y install snapd + systemctl start snapd + sed -i -e 's/adopt-info.*/version: test/g' -e '/set version/d' snap/snapcraft.yaml + snap install snapcraft --classic + main_script: | + snapcraft --destructive-mode + packages_artifacts: + path: "*.snap" + on_failure: + fail_script: | + ls -d /root/.cache/snapcraft/log 2> /dev/null | xargs tar cf snap-build-fail-logs.tar + log_artifacts: + path: "snap-build-fail-logs.tar" + +# Run the stage one (no mocking) tests across all distros on GCP +report_stageone_task: + alias: "stageone_report" + name: "Report Stage One - $BUILD_NAME" + depends_on: + - rpm_build + - snap_build + gce_instance: *standardvm + matrix: + - env: *centos9 + - env: *centos8 + - env: *fedora + - env: *fedoraprior + - env: &ubuntu PROJECT: ${UBUNTU_PROJECT} BUILD_NAME: ${UBUNTU_NAME} VM_IMAGE_NAME: ${UBUNTU_IMAGE_NAME} - - env: + - env: &ubuntuprior PROJECT: ${UBUNTU_PROJECT} BUILD_NAME: ${UBUNTU_PRIOR_NAME} VM_IMAGE_NAME: ${UBUNTU_PRIOR_IMAGE_NAME} - remove_sos_script: &remove_sos | + - env: &ubuntuprior2 + PROJECT: ${UBUNTU_PROJECT} + BUILD_NAME: ${UBUNTU_PRIOR2_NAME} + VM_IMAGE_NAME: ${UBUNTU_PRIOR2_IMAGE_NAME} + setup_script: &setup | if [ $(command -v apt) ]; then + echo "$ARTCURL/snap%20Build%20From%20Checkout/packages/sosreport_test_amd64.snap" + $ARTCURL/snap%20Build%20From%20Checkout/packages/sosreport_test_amd64.snap apt -y purge sosreport apt update --allow-releaseinfo-change - apt -y install python3-pip + apt -y install python3-pip 
snapd + systemctl start snapd + snap install ./sosreport_test_amd64.snap --classic --dangerous + snap alias sosreport.sos sos fi if [ $(command -v dnf) ]; then + echo "$ARTCURL/rpm%20Build%20From%20Checkout%20-%20${BUILD_NAME}/packages/sos_${BUILD_NAME}.rpm" + $ARTCURL/rpm%20Build%20From%20Checkout%20-%20${BUILD_NAME}/packages/sos_${BUILD_NAME}.rpm dnf -y remove sos dnf -y install python3-pip ethtool + dnf -y install ./sos_${BUILD_NAME}.rpm fi - setup_script: &setup 'pip3 install avocado-framework==94.0 python-magic pyyaml' + pip3 install avocado-framework==94.0 # run the unittests separately as they require a different PYTHONPATH in # order for the imports to work properly under avocado unittest_script: PYTHONPATH=. avocado run tests/unittests/ - main_script: PYTHONPATH=tests/ avocado run --test-runner=runner -t stageone tests/{cleaner,collect,report,vendor}_tests + main_script: PYTHONPATH=tests/ avocado run -p TESTLOCAL=true --test-runner=runner -t stageone tests/{cleaner,collect,report,vendor}_tests on_failure: fail_script: &faillogs | ls -d /var/tmp/avocado* /root/avocado* 2> /dev/null | xargs tar cf sos-fail-logs.tar - log_artifacts: + log_artifacts: &logs path: "sos-fail-logs.tar" # IFF the stage one tests all pass, then run stage two for latest distros @@ -140,25 +197,14 @@ alias: "stagetwo_report" name: "Report Stage Two - $BUILD_NAME" depends_on: stageone_report + timeout_in: 45m gce_instance: *standardvm matrix: - - env: - PROJECT: ${CENTOS_PROJECT} - BUILD_NAME: ${CENTOS_9_NAME} - VM_IMAGE_NAME: ${CENTOS_9_IMAGE_NAME} - - env: - PROJECT: ${CENTOS_PROJECT} - BUILD_NAME: ${CENTOS_8_NAME} - VM_IMAGE_NAME: ${CENTOS_8_IMAGE_NAME} - - env: - PROJECT: ${FEDORA_PROJECT} - BUILD_NAME: ${FEDORA_NAME} - VM_IMAGE_NAME: ${FEDORA_IMAGE_NAME} - - env: - PROJECT: ${UBUNTU_PROJECT} - BUILD_NAME: ${UBUNTU_NAME} - VM_IMAGE_NAME: ${UBUNTU_IMAGE_NAME} - remove_sos_script: *remove_sos + - env: *centos9 + - env: *centos8 + - env: *fedora + - env: *ubuntu + setup_script: *setup 
install_pexpect_script: | if [ $(command -v apt) ]; then apt -y install python3-pexpect @@ -166,34 +212,38 @@ if [ $(command -v dnf) ]; then dnf -y install python3-pexpect fi - setup_script: *setup - main_script: PYTHONPATH=tests/ avocado run --test-runner=runner -t stagetwo tests/{cleaner,collect,report,vendor}_tests + main_script: PYTHONPATH=tests/ avocado run -p TESTLOCAL=true --test-runner=runner -t stagetwo tests/{cleaner,collect,report,vendor}_tests on_failure: fail_script: *faillogs - log_artifacts: - path: "sos-fail-logs.tar" + log_artifacts: *logs report_foreman_task: skip: "!changesInclude('.cirrus.yml', '**/{__init__,apache,foreman,foreman_tests,candlepin,pulp,pulpcore}.py')" + timeout_in: 45m alias: "foreman_integration" - name: "Integration Test - Foreman ${FOREMAN_VER}" + name: "Integration Test - Foreman ${FOREMAN_VER} - ${BUILD_NAME}" depends_on: stageone_report gce_instance: &bigvm <<: *standardvm type: e2-standard-2 matrix: - env: - PROJECT: ${SOS_PROJECT} - VM_IMAGE_NAME: ${FOREMAN_CENTOS_IMAGE_NAME} - FOREMAN_VER: "2.5 - CentOS Stream 8" - - env: - PROJECT: ${SOS_PROJECT} - VM_IMAGE_NAME: ${FOREMAN_DEBIAN_IMAGE_NAME} - FOREMAN_VER: "2.5 - Debian 10" - remove_sos_script: *remove_sos + <<: *centos8 + FOREMAN_VER: "2.5" + - env: + <<: *centos8 + FOREMAN_VER: "3.1" + - env: + <<: *centos8 + FOREMAN_VER: "3.4" + - env: + PROJECT: ${DEBIAN_PROJECT} + VM_IMAGE_NAME: ${DEBIAN_IMAGE_NAME} + BUILD_NAME: ${DEBIAN_NAME} + FOREMAN_VER: "3.4" setup_script: *setup - main_script: PYTHONPATH=tests/ avocado run --test-runner=runner -t foreman tests/product_tests/foreman/ + foreman_setup_script: ./tests/test_data/foreman_setup.sh + main_script: PYTHONPATH=tests/ avocado run -p TESTLOCAL=true --test-runner=runner -t foreman tests/product_tests/foreman/ on_failure: fail_script: *faillogs - log_artifacts: - path: "sos-fail-logs.tar" + log_artifacts: *logs diff -Nru sosreport-4.4/debian/changelog sosreport-4.5.4ubuntu0.20.04.1/debian/changelog --- 
sosreport-4.4/debian/changelog 2022-08-17 09:51:47.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/debian/changelog 2023-06-13 10:18:11.000000000 +0530 @@ -1,3 +1,27 @@ +sosreport (4.5.4ubuntu0.20.04.1) focal; urgency=medium + + * New 4.5.4 upstream. (LP: #2022915) + + * For more details, full release note is available here: + - https://github.com/sosreport/sos/releases/tag/4.5.4 + + * d/control: + - Add 'python3-pexpect' as part of the build depends. + + * d/rules: + - skip running unittests/policy_tests.py due to the avocado dependency + + * Former patches, now fixed: + - d/p/0002-revert-to-old-style-binary-file-detection.patch + + * Remaining patches: + - d/p/0001-debian-change-tmp-dir-location.patch + + * New patches: + - d/p/0002-regex-flags.patch (LP: #2024547) + + -- Nikhil Kshirsagar Tue, 13 Jun 2023 04:48:11 +0000 + sosreport (4.4-1ubuntu0.20.04.1) focal; urgency=medium * New 4.4 upstream. (LP: #1986611) diff -Nru sosreport-4.4/debian/control sosreport-4.5.4ubuntu0.20.04.1/debian/control --- sosreport-4.4/debian/control 2022-08-17 09:51:47.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/debian/control 2023-06-13 10:18:11.000000000 +0530 @@ -12,6 +12,7 @@ python3-nose, python3-setuptools, python3-sphinx, + python3-pexpect, Homepage: https://github.com/sosreport/sos Vcs-Browser: https://salsa.debian.org/sosreport-team/sosreport Vcs-Git: https://salsa.debian.org/sosreport-team/sosreport.git diff -Nru sosreport-4.4/debian/patches/0002-regex-flags.patch sosreport-4.5.4ubuntu0.20.04.1/debian/patches/0002-regex-flags.patch --- sosreport-4.4/debian/patches/0002-regex-flags.patch 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/debian/patches/0002-regex-flags.patch 2023-06-13 10:17:31.000000000 +0530 @@ -0,0 +1,35 @@ +Description: Fix do_file_private_sub() bug related to regex flags +Origin: upstream +Bug: https://github.com/sosreport/sos/issues/3261 +Applied-Upstream: https://github.com/sosreport/sos/pull/3263 +--- +This patch header 
follows DEP-3: http://dep.debian.net/deps/dep3/ +Index: sos-4.5.4/sos/report/plugins/__init__.py +=================================================================== +--- sos-4.5.4.orig/sos/report/plugins/__init__.py ++++ sos-4.5.4/sos/report/plugins/__init__.py +@@ -1274,7 +1274,12 @@ class Plugin(): + """ + try: + path = self._get_dest_for_srcpath(srcpath) +- pattern = regexp.pattern if hasattr(regexp, "pattern") else regexp ++ if hasattr(regexp, "pattern"): ++ pattern = regexp.pattern ++ flags = regexp.flags | re.IGNORECASE ++ else: ++ pattern = regexp ++ flags = re.IGNORECASE + self._log_debug("substituting scrpath '%s'" % srcpath) + self._log_debug("substituting '%s' for '%s' in '%s'" + % (subst, pattern, path)) +@@ -1284,8 +1289,8 @@ class Plugin(): + content = readable.read() + if not isinstance(content, str): + content = content.decode('utf8', 'ignore') +- result, replacements = re.subn(regexp, subst, content, +- flags=re.IGNORECASE) ++ result, replacements = re.subn(pattern, subst, content, ++ flags=flags) + if replacements: + self.archive.add_string(result, srcpath) + else: diff -Nru sosreport-4.4/debian/patches/0002-revert-to-old-style-binary-file-detection.patch sosreport-4.5.4ubuntu0.20.04.1/debian/patches/0002-revert-to-old-style-binary-file-detection.patch --- sosreport-4.4/debian/patches/0002-revert-to-old-style-binary-file-detection.patch 2022-08-17 09:51:47.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/debian/patches/0002-revert-to-old-style-binary-file-detection.patch 1970-01-01 05:30:00.000000000 +0530 @@ -1,79 +0,0 @@ -Description: Revert to old style of checking if a file is binary - Reverting to older way of checking if a file is binary - because 4.4 approach depends on python3-magic version 0.4.20 - which does not exist in focal. - . - sosreport (4.4-1ubuntu0.20.04.1) focal; urgency=medium - . - * New 4.4 upstream. (LP: #1986611) - . 
- * For more details, full release note is available here: - - https://github.com/sosreport/sos/releases/tag/4.3 - . - * New patches: - - d/p/0002-revert-to-old-style-binary-file-detection.patch - . - * Former patches, now fixed: - - d/p/0002-fix-setup-py.patch - - d/p/0003-mention-sos-help-in-sos-manpage.patch - . - * Remaining patches: - - d/p/0001-debian-change-tmp-dir-location.patch -Author: Nikhil Kshirsagar -Bug-Ubuntu: https://bugs.launchpad.net/bugs/1986611 - ---- sosreport-4.4.orig/requirements.txt -+++ sosreport-4.4/requirements.txt -@@ -2,5 +2,4 @@ pycodestyle>=2.4.0 - coverage>=4.0.3 - Sphinx>=1.3.5 - pexpect>=4.0.0 --python_magic>=0.4.20 - pyyaml ---- sosreport-4.4.orig/setup.py -+++ sosreport-4.4/setup.py -@@ -107,7 +107,7 @@ setup( - ], - cmdclass=cmdclass, - command_options=command_options, -- requires=['pexpect', 'python_magic', 'pyyaml'] -+ requires=['pexpect', 'pyyaml'] - ) - - ---- sosreport-4.4.orig/sos/utilities.py -+++ sosreport-4.4/sos/utilities.py -@@ -19,7 +19,6 @@ import tempfile - import threading - import time - import io --import magic - - from contextlib import closing - from collections import deque -@@ -75,17 +74,14 @@ def file_is_binary(fname): - :returns: True if binary, else False - :rtype: ``bool`` - """ -- try: -- _ftup = magic.detect_from_filename(fname) -- _mimes = ['text/', 'inode/'] -- return ( -- _ftup.encoding == 'binary' and not -- any(_ftup.mime_type.startswith(_mt) for _mt in _mimes) -- ) -- except Exception: -- # if for some reason this check fails, don't blindly remove all files -- # but instead rely on other checks done by the component -- return False -+ with open(fname, 'tr') as tfile: -+ try: -+ # when opened as above (tr), reading binary content will raise -+ # an exception -+ tfile.read(1) -+ return False -+ except UnicodeDecodeError: -+ return True - - - def find(file_pattern, top_dir, max_depth=None, path_pattern=None): diff -Nru sosreport-4.4/debian/patches/series 
sosreport-4.5.4ubuntu0.20.04.1/debian/patches/series --- sosreport-4.4/debian/patches/series 2022-08-17 09:51:47.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/debian/patches/series 2023-06-13 10:15:19.000000000 +0530 @@ -1,2 +1,2 @@ 0001-debian-change-tmp-dir-location.patch -0002-revert-to-old-style-binary-file-detection.patch +0002-regex-flags.patch diff -Nru sosreport-4.4/debian/rules sosreport-4.5.4ubuntu0.20.04.1/debian/rules --- sosreport-4.4/debian/rules 2021-11-01 18:32:20.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/debian/rules 2023-06-13 10:18:11.000000000 +0530 @@ -12,4 +12,4 @@ rm -rf debian/sosreport/usr/config override_dh_auto_test: - nosetests3 -v --with-cover --cover-package=sos tests/unittests + nosetests3 -v --with-cover --cover-package=sos tests/unittests --ignore-files="policy_tests\.py" diff -Nru sosreport-4.4/docs/conf.py sosreport-4.5.4ubuntu0.20.04.1/docs/conf.py --- sosreport-4.4/docs/conf.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/docs/conf.py 2023-05-26 22:32:49.000000000 +0530 @@ -59,9 +59,9 @@ # built documents. # # The short X.Y version. -version = '4.4' +version = '4.5.4' # The full version, including alpha/beta/rc tags. -release = '4.4' +release = '4.5.4' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff -Nru sosreport-4.4/.fmf/version sosreport-4.5.4ubuntu0.20.04.1/.fmf/version --- sosreport-4.4/.fmf/version 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/.fmf/version 1970-01-01 05:30:00.000000000 +0530 @@ -1 +0,0 @@ -1 diff -Nru sosreport-4.4/.github/codeql/codeql-config.yaml sosreport-4.5.4ubuntu0.20.04.1/.github/codeql/codeql-config.yaml --- sosreport-4.4/.github/codeql/codeql-config.yaml 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/.github/codeql/codeql-config.yaml 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,4 @@ +name: "SoS CodeQL Config" + +paths: + - sos diff -Nru sosreport-4.4/.github/workflows/codeql.yaml sosreport-4.5.4ubuntu0.20.04.1/.github/workflows/codeql.yaml --- sosreport-4.4/.github/workflows/codeql.yaml 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/.github/workflows/codeql.yaml 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,42 @@ +name: "CodeQL" + +on: + push: + branches: [ "main" ] + pull_request: + branches: [ "main" ] + schedule: + - cron: "49 12 * * 6" + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ python ] + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + config-file: .github/codeql/codeql-config.yaml + languages: ${{ matrix.language }} + queries: +security-and-quality + + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{ matrix.language }}" diff -Nru sosreport-4.4/.github/workflows/snap.yaml sosreport-4.5.4ubuntu0.20.04.1/.github/workflows/snap.yaml --- sosreport-4.4/.github/workflows/snap.yaml 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/.github/workflows/snap.yaml 2023-05-26 
22:32:49.000000000 +0530 @@ -0,0 +1,32 @@ +name: snap +on: + push: + branches: + - main + +jobs: + build: + runs-on: ubuntu-latest + concurrency: + group: snap-build + cancel-in-progress: true + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - uses: snapcore/action-build@v1 + id: build-snap + # Make sure the snap is installable + - run: | + sudo apt -y remove sosreport + sudo snap install --classic --dangerous ${{ steps.build-snap.outputs.snap }} + sudo snap alias sosreport.sos sos + # Do some testing with the snap + - run: | + sudo sos help + - uses: snapcore/action-publish@v1 + env: + SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.STORE_LOGIN }} + with: + snap: ${{ steps.build-snap.outputs.snap }} + release: "latest/edge" diff -Nru sosreport-4.4/.gitignore sosreport-4.5.4ubuntu0.20.04.1/.gitignore --- sosreport-4.4/.gitignore 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/.gitignore 2023-05-26 22:32:49.000000000 +0530 @@ -16,16 +16,8 @@ MANIFEST build/ dist/ +*sos.egg* docs/_build # Pycharm -.idea/**/workspace.xml -.idea/**/tasks.xml -.idea/dictionaries -.idea/**/dataSources/ -.idea/**/dataSources.ids -.idea/**/dataSources.xml -.idea/**/dataSources.local.xml -.idea/**/sqlDataSources.xml -.idea/**/dynamic.xml -.idea/**/uiDesigner.xml +.idea/ diff -Nru sosreport-4.4/man/en/sos-clean.1 sosreport-4.5.4ubuntu0.20.04.1/man/en/sos-clean.1 --- sosreport-4.4/man/en/sos-clean.1 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/man/en/sos-clean.1 2023-05-26 22:32:49.000000000 +0530 @@ -60,8 +60,8 @@ the target archive, so only use this option when absolutely necessary or you have complete trust in the party/parties that may handle the generated report. -Valid values for this option are currently: \fBhostname\fR, \fBip\fR, \fBmac\fR, \fBkeyword\fR, -and \fBusername\fR. +Valid values for this option are currently: \fBhostname\fR, \fBip\fR, \fBipv6\fR, +\fBmac\fR, \fBkeyword\fR, and \fBusername\fR. 
.TP .B \-\-keywords KEYWORDS Provide a comma-delimited list of keywords to scrub in addition to the default parsers. diff -Nru sosreport-4.4/man/en/sos-collect.1 sosreport-4.5.4ubuntu0.20.04.1/man/en/sos-collect.1 --- sosreport-4.4/man/en/sos-collect.1 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/man/en/sos-collect.1 2023-05-26 22:32:49.000000000 +0530 @@ -41,7 +41,6 @@ [\-\-skip-files FILES] [\-s|\-\-sysroot SYSROOT] [\-\-ssh\-user SSH_USER] - [\-\-sos-cmd SOS_CMD] [\-t|\-\-threads THREADS] [\-\-timeout TIMEOUT] [\-\-transport TRANSPORT] @@ -177,7 +176,7 @@ \fB\-\-save\-group\fR GROUP Save the results of this run of sos collect to a host group definition. -sos-colllector will write a JSON-formatted file with name GROUP to /var/lib/sos collect/ +sos-collector will write a JSON-formatted file with name GROUP to /var/lib/sos collect/ with the settings for cluster-type, primary, and the node list as discovered by cluster enumeration. Note that this means regexes are not directly saved to host groups, but the results of matching against those regexes are. @@ -259,7 +258,7 @@ .TP \fB\-\-image IMAGE\fR Specify an image to use for the temporary container created for collections on -containerized host, if you do not want to use the default image specifed by the +containerized host, if you do not want to use the default image specified by the host's policy. Note that this should include the registry. .TP \fB\-\-force-pull-image TOGGLE, \-\-pull TOGGLE\fR @@ -339,15 +338,6 @@ \fB\-s\fR SYSROOT, \fB\-\-sysroot\fR SYSROOT Sosreport option. Specify an alternate root file system path. .TP -\fB\-\-sos-cmd\fR SOS_CMD -Define all options that sosreport should be run with on the nodes. This will -override any other commandline options as well as any options specified by a -cluster profile. - -The sosreport command will execute as 'sosreport --batch SOS_CMD'. 
The BATCH -option cannot be removed from the sosreport command as it is required to run -sosreport non-interactively for sos collect to function. -.TP \fB\-t\fR THREADS \fB\-\-threads\fR THREADS Report option. Specify the number of collection threads to run. @@ -388,7 +378,7 @@ \fB\-v\fR \fB\-\-verbose\fR Print debug information to screen. .TP -\fB\-\-verfiy\fR +\fB\-\-verify\fR Sosreport option. Passes the "--verify" option to sosreport on the nodes which causes sosreport to validate plugin-specific data during collection. diff -Nru sosreport-4.4/man/en/sos-collector.1 sosreport-4.5.4ubuntu0.20.04.1/man/en/sos-collector.1 --- sosreport-4.4/man/en/sos-collector.1 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/man/en/sos-collector.1 2023-05-26 22:32:49.000000000 +0530 @@ -41,7 +41,6 @@ [\-\-skip-files FILES] [\-s|\-\-sysroot SYSROOT] [\-\-ssh\-user SSH_USER] - [\-\-sos-cmd SOS_CMD] [\-t|\-\-threads THREADS] [\-\-timeout TIMEOUT] [\-\-transport TRANSPORT] @@ -177,7 +176,7 @@ \fB\-\-save\-group\fR GROUP Save the results of this run of sos collect to a host group definition. -sos-colllector will write a JSON-formatted file with name GROUP to /var/lib/sos collect/ +sos-collector will write a JSON-formatted file with name GROUP to /var/lib/sos collect/ with the settings for cluster-type, primary, and the node list as discovered by cluster enumeration. Note that this means regexes are not directly saved to host groups, but the results of matching against those regexes are. @@ -259,7 +258,7 @@ .TP \fB\-\-image IMAGE\fR Specify an image to use for the temporary container created for collections on -containerized host, if you do not want to use the default image specifed by the +containerized host, if you do not want to use the default image specified by the host's policy. Note that this should include the registry. 
.TP \fB\-\-force-pull-image TOGGLE, \-\-pull TOGGLE\fR @@ -339,15 +338,6 @@ \fB\-s\fR SYSROOT, \fB\-\-sysroot\fR SYSROOT Sosreport option. Specify an alternate root file system path. .TP -\fB\-\-sos-cmd\fR SOS_CMD -Define all options that sosreport should be run with on the nodes. This will -override any other commandline options as well as any options specified by a -cluster profile. - -The sosreport command will execute as 'sosreport --batch SOS_CMD'. The BATCH -option cannot be removed from the sosreport command as it is required to run -sosreport non-interactively for sos collect to function. -.TP \fB\-t\fR THREADS \fB\-\-threads\fR THREADS Report option. Specify the number of collection threads to run. @@ -388,7 +378,7 @@ \fB\-v\fR \fB\-\-verbose\fR Print debug information to screen. .TP -\fB\-\-verfiy\fR +\fB\-\-verify\fR Sosreport option. Passes the "--verify" option to sosreport on the nodes which causes sosreport to validate plugin-specific data during collection. diff -Nru sosreport-4.4/man/en/sos-mask.1 sosreport-4.5.4ubuntu0.20.04.1/man/en/sos-mask.1 --- sosreport-4.4/man/en/sos-mask.1 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/man/en/sos-mask.1 2023-05-26 22:32:49.000000000 +0530 @@ -60,8 +60,8 @@ the target archive, so only use this option when absolutely necessary or you have complete trust in the party/parties that may handle the generated report. -Valid values for this option are currently: \fBhostname\fR, \fBip\fR, \fBmac\fR, \fBkeyword\fR, -and \fBusername\fR. +Valid values for this option are currently: \fBhostname\fR, \fBip\fR, \fBipv6\fR, +\fBmac\fR, \fBkeyword\fR, and \fBusername\fR. .TP .B \-\-keywords KEYWORDS Provide a comma-delimited list of keywords to scrub in addition to the default parsers. 
diff -Nru sosreport-4.4/man/en/sos-report.1 sosreport-4.5.4ubuntu0.20.04.1/man/en/sos-report.1 --- sosreport-4.4/man/en/sos-report.1 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/man/en/sos-report.1 2023-05-26 22:32:49.000000000 +0530 @@ -27,11 +27,13 @@ [--list-profiles]\fR [--verify]\fR [--log-size]\fR + [--journal-size]\fR [--all-logs]\fR [--since YYYYMMDD[HHMMSS]]\fR [--skip-commands commands]\fR [--skip-files files]\fR [--allow-system-changes]\fR + [--low-priority]\fR [-z|--compression-type method]\fR [--encrypt]\fR [--encrypt-key KEY]\fR @@ -185,14 +187,23 @@ causes sos to capture the last X amount of the file or command output collected. By default, this is set to 25 MiB and applies to all files and command output collected -with the exception of journal collections, which are limited to 100 MiB. +with the exception of journal collections, which are limited by the \fB--journal-size\fR +option instead. Setting this value to 0 removes all size limitations, and any files or commands collected will be collected in their entirety, which may drastically increase the size of the final sos report tarball and the memory usage of sos during collection -of commands, such as very large journals that may be several GiB in size. +of commands. .TP +.B \--journal-size +Places a limit on the size of journals collected in MiB. Note that this causes sos +to capture the last X amount of the journal. + +By default, this is set to 100 MiB. Setting this value to 0 removes all size limitations, +as does the use of the \fB--all-logs\fR option. This may drastically increase the size +of the final sos report tarball. +.TP .B \--all-logs Tell plugins to collect all possible log data ignoring any size limits and including logs in non-default locations. This option may significantly @@ -220,6 +231,11 @@ .B \--allow-system-changes Run commands even if they can change the system (e.g. load kernel modules). 
.TP +.B \--low-priority +Set sos to execute as a low priority process so that is does not interfere with +other processes running on the system. Specific distributions may set their own +constraints, but by default this involves setting process niceness to 19 and, if +available, setting an idle IO class via ionice. .B \-z, \--compression-type METHOD Override the default compression type specified by the active policy. .TP diff -Nru sosreport-4.4/man/en/sosreport.1 sosreport-4.5.4ubuntu0.20.04.1/man/en/sosreport.1 --- sosreport-4.4/man/en/sosreport.1 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/man/en/sosreport.1 2023-05-26 22:32:49.000000000 +0530 @@ -27,11 +27,13 @@ [--list-profiles]\fR [--verify]\fR [--log-size]\fR + [--journal-size]\fR [--all-logs]\fR [--since YYYYMMDD[HHMMSS]]\fR [--skip-commands commands]\fR [--skip-files files]\fR [--allow-system-changes]\fR + [--low-priority]\fR [-z|--compression-type method]\fR [--encrypt]\fR [--encrypt-key KEY]\fR @@ -185,14 +187,23 @@ causes sos to capture the last X amount of the file or command output collected. By default, this is set to 25 MiB and applies to all files and command output collected -with the exception of journal collections, which are limited to 100 MiB. +with the exception of journal collections, which are limited by the \fB--journal-size\fR +option instead. Setting this value to 0 removes all size limitations, and any files or commands collected will be collected in their entirety, which may drastically increase the size of the final sos report tarball and the memory usage of sos during collection -of commands, such as very large journals that may be several GiB in size. +of commands. .TP +.B \--journal-size +Places a limit on the size of journals collected in MiB. Note that this causes sos +to capture the last X amount of the journal. + +By default, this is set to 100 MiB. Setting this value to 0 removes all size limitations, +as does the use of the \fB--all-logs\fR option. 
This may drastically increase the size +of the final sos report tarball. +.TP .B \--all-logs Tell plugins to collect all possible log data ignoring any size limits and including logs in non-default locations. This option may significantly @@ -220,6 +231,11 @@ .B \--allow-system-changes Run commands even if they can change the system (e.g. load kernel modules). .TP +.B \--low-priority +Set sos to execute as a low priority process so that is does not interfere with +other processes running on the system. Specific distributions may set their own +constraints, but by default this involves setting process niceness to 19 and, if +available, setting an idle IO class via ionice. .B \-z, \--compression-type METHOD Override the default compression type specified by the active policy. .TP diff -Nru sosreport-4.4/.packit.yaml sosreport-4.5.4ubuntu0.20.04.1/.packit.yaml --- sosreport-4.4/.packit.yaml 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/.packit.yaml 2023-05-26 22:32:49.000000000 +0530 @@ -1,25 +1,25 @@ +upstream_project_url: https://github.com/sosreport/sos +specfile_path: sos.spec downstream_package_name: sos +upstream_package_name: sos + +files_to_sync: + - sos.spec + - .packit.yaml + +srpm_build_deps: + - python3-devel + - gettext + jobs: -- job: copr_build - metadata: - targets: - - fedora-development-x86_64 - - fedora-development-aarch64 - - fedora-development-ppc64le - - fedora-development-s390x - trigger: pull_request -- job: tests - trigger: pull_request - metadata: + - job: copr_build + trigger: pull_request targets: - - fedora-all - - epel-8 - - epel-9 -specfile_path: sos.spec -synced_files: -- sos.spec -- .packit.yaml -upstream_package_name: sos + - fedora-development-x86_64 + - fedora-development-aarch64 + - fedora-development-ppc64le + - fedora-development-s390x + notifications: pull_request: successful_build: true diff -Nru sosreport-4.4/README.md sosreport-4.5.4ubuntu0.20.04.1/README.md --- sosreport-4.4/README.md 2022-08-16 
01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/README.md 2023-05-26 22:32:49.000000000 +0530 @@ -1,4 +1,4 @@ -[![Build Status](https://api.cirrus-ci.com/github/sosreport/sos.svg?branch=main)](https://cirrus-ci.com/github/sosreport/sos) [![Documentation Status](https://readthedocs.org/projects/sos/badge/?version=main)](https://sos.readthedocs.io/en/main/?badge=main) [![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/sosreport/sos.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/sosreport/sos/context:python) +[![Build Status](https://api.cirrus-ci.com/github/sosreport/sos.svg?branch=main)](https://cirrus-ci.com/github/sosreport/sos) [![Documentation Status](https://readthedocs.org/projects/sos/badge/?version=main)](https://sos.readthedocs.io/en/main/?badge=main) # SoS diff -Nru sosreport-4.4/requirements.txt sosreport-4.5.4ubuntu0.20.04.1/requirements.txt --- sosreport-4.4/requirements.txt 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/requirements.txt 2023-05-26 22:32:49.000000000 +0530 @@ -2,5 +2,6 @@ coverage>=4.0.3 Sphinx>=1.3.5 pexpect>=4.0.0 -python_magic>=0.4.20 pyyaml +setuptools + diff -Nru sosreport-4.4/setup.py sosreport-4.5.4ubuntu0.20.04.1/setup.py --- sosreport-4.4/setup.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/setup.py 2023-05-26 22:32:49.000000000 +0530 @@ -1,84 +1,16 @@ #!/usr/bin/env python -from distutils.core import setup -from distutils.command.build import build -from distutils.command.install_data import install_data -from distutils.dep_util import newer -from distutils.log import error - -import glob -import os -import re -import subprocess -import sys - +from setuptools import setup, find_packages from sos import __version__ as VERSION -PO_DIR = 'po' -MO_DIR = os.path.join('build', 'mo') - -class BuildData(build): - def run(self): - build.run(self) - for po in glob.glob(os.path.join(PO_DIR, '*.po')): - lang = os.path.basename(po[:-3]) - mo = 
os.path.join(MO_DIR, lang, 'sos.mo') - - directory = os.path.dirname(mo) - if not os.path.exists(directory): - os.makedirs(directory) - - if newer(po, mo): - try: - rc = subprocess.call(['msgfmt', '-o', mo, po]) - if rc != 0: - raise Warning("msgfmt returned %d" % (rc,)) - except Exception as e: - error("Failed gettext.") - sys.exit(1) - -class InstallData(install_data): - def run(self): - self.data_files.extend(self._find_mo_files()) - install_data.run(self) - - def _find_mo_files(self): - data_files = [] - for mo in glob.glob(os.path.join(MO_DIR, '*', 'sos.mo')): - lang = os.path.basename(os.path.dirname(mo)) - dest = os.path.join('share', 'locale', lang, 'LC_MESSAGES') - data_files.append((dest, [mo])) - return data_files - - # Workaround https://bugs.python.org/issue644744 - def copy_file (self, filename, dirname): - (out, _) = install_data.copy_file(self, filename, dirname) - # match for man pages - if re.search(r'/man/man\d/.+\.\d$', out): - return (out+".gz", _) - return (out, _) - -cmdclass = {'build': BuildData, 'install_data': InstallData} -command_options = {} -try: - from sphinx.setup_command import BuildDoc - cmdclass['build_sphinx'] = BuildDoc - command_options={ - 'build_sphinx': { - 'project': ('setup.py', 'sos'), - 'version': ('setup.py', VERSION), - 'source_dir': ('setup.py', 'docs') - } - } -except Exception: - print("Unable to build sphinx docs - module not present. Install sphinx " - "to enable documentation generation") setup( name='sos', version=VERSION, - description=("""A set of tools to gather troubleshooting""" - """ information from a system."""), + install_requires=['pexpect', 'pyyaml'], + description=( + 'A set of tools to gather troubleshooting information from a system' + ), author='Bryn M. 
Reeves', author_email='bmr@redhat.com', maintainer='Jake Hunsaker', @@ -94,21 +26,9 @@ ('share/man/man5', ['man/en/sos.conf.5']), ('share/licenses/sos', ['LICENSE']), ('share/doc/sos', ['AUTHORS', 'README.md']), - ('config', ['sos.conf']) + ('config', ['sos.conf', 'tmpfiles/tmpfilesd-sos-rh.conf']) ], - packages=[ - 'sos', 'sos.presets', 'sos.presets.redhat', 'sos.policies', - 'sos.policies.distros', 'sos.policies.runtimes', - 'sos.policies.package_managers', 'sos.policies.init_systems', - 'sos.report', 'sos.report.plugins', 'sos.collector', - 'sos.collector.clusters', 'sos.collector.transports', 'sos.cleaner', - 'sos.cleaner.mappings', 'sos.cleaner.parsers', 'sos.cleaner.archives', - 'sos.help' - ], - cmdclass=cmdclass, - command_options=command_options, - requires=['pexpect', 'python_magic', 'pyyaml'] - ) - + packages=find_packages(include=['sos', 'sos.*']) +) # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/snap/snapcraft.yaml sosreport-4.5.4ubuntu0.20.04.1/snap/snapcraft.yaml --- sosreport-4.4/snap/snapcraft.yaml 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/snap/snapcraft.yaml 2023-05-26 22:32:49.000000000 +0530 @@ -5,35 +5,38 @@ primarily aimed at Linux distributions and other UNIX-like operating systems. grade: stable -base: core20 +base: core22 confinement: classic adopt-info: sos +license: GPL-2.0-or-later +environment: + PYTHONPATH: ${SNAP}/lib/python3.10/site-packages:${SNAP}/usr/lib/python3/dist-packages:${PYTHONPATH} parts: sos: plugin: python source: . 
override-pull: | - snapcraftctl pull - snapcraftctl set-version $(git describe --tags --always) + craftctl default + craftctl set version="$(git describe --tags --always)" build-packages: - git - python3 - snapcraft - gettext stage-packages: - - python3-magic + - python3-venv + python-packages: + - pip + - setuptools + - wheel + - python_magic + - packaging apps: sos: - environment: - PYTHONPATH: ${PYTHONPATH}:${SNAP}/lib/python3.8/site-packages:${SNAP}/usr/lib/python3/dist-packages command: bin/sos sosreport: - environment: - PYTHONPATH: ${PYTHONPATH}:${SNAP}/lib/python3.8/site-packages:${SNAP}/usr/lib/python3/dist-packages command: bin/sos report sos-collector: - environment: - PYTHONPATH: ${PYTHONPATH}:${SNAP}/lib/python3.8/site-packages:${SNAP}/usr/lib/python3/dist-packages command: bin/sos collector diff -Nru sosreport-4.4/sos/archive.py sosreport-4.5.4ubuntu0.20.04.1/sos/archive.py --- sosreport-4.4/sos/archive.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/archive.py 2023-05-26 22:32:49.000000000 +0530 @@ -16,6 +16,7 @@ import codecs import errno import stat +import re from datetime import datetime from threading import Lock @@ -658,6 +659,9 @@ orig_path = tarinfo.name[len(os.path.split(self._archive_root)[-1]):] if not orig_path: orig_path = self._archive_root + skips = ['/version.txt$', '/sos_logs(/.*)?', '/sos_reports(/.*)?'] + if any(re.match(skip, orig_path) for skip in skips): + return None try: fstat = os.stat(orig_path) except OSError: @@ -697,6 +701,15 @@ kwargs = {'preset': 3} tar = tarfile.open(self._archive_name, mode="w:%s" % _comp_mode, **kwargs) + # add commonly reviewed files first, so that they can be more easily + # read from memory without needing to extract the whole archive + for _content in ['version.txt', 'sos_reports', 'sos_logs']: + if not os.path.exists(os.path.join(self._archive_root, _content)): + continue + tar.add( + os.path.join(self._archive_root, _content), + arcname=f"{self._name}/{_content}" + 
) # we need to pass the absolute path to the archive root but we # want the names used in the archive to be relative. tar.add(self._archive_root, arcname=self._name, diff -Nru sosreport-4.4/sos/cleaner/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/__init__.py --- sosreport-4.4/sos/cleaner/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -25,6 +25,7 @@ from sos.cleaner.parsers.hostname_parser import SoSHostnameParser from sos.cleaner.parsers.keyword_parser import SoSKeywordParser from sos.cleaner.parsers.username_parser import SoSUsernameParser +from sos.cleaner.parsers.ipv6_parser import SoSIPv6Parser from sos.cleaner.archives.sos import (SoSReportArchive, SoSReportDirectory, SoSCollectorArchive, SoSCollectorDirectory) @@ -54,11 +55,14 @@ that future iterations will maintain the same consistent obfuscation pairing. - In the case of IP addresses, support is for IPv4 and efforts are made to - keep network topology intact so that later analysis is as accurate and + In the case of IP addresses, support is for IPv4 and IPv6 - effort is made + to keep network topology intact so that later analysis is as accurate and easily understandable as possible. If an IP address is encountered that we cannot determine the netmask for, a random IP address is used instead. + For IPv6, note that IPv4-mapped addresses, e.g. ::ffff:10.11.12.13, are + NOT supported currently, and will remain unobfuscated. + For hostnames, domains are obfuscated as whole units, leaving the TLD in place. 
@@ -111,6 +115,8 @@ # when obfuscating a SoSCollector run during archive extraction os.makedirs(os.path.join(self.tmpdir, 'cleaner'), exist_ok=True) + self.validate_parser_values() + self.cleaner_mapping = self.load_map_file() os.umask(0o77) self.in_place = in_place @@ -121,6 +127,7 @@ self.parsers = [ SoSHostnameParser(self.cleaner_mapping, self.opts.domains), SoSIPParser(self.cleaner_mapping), + SoSIPv6Parser(self.cleaner_mapping), SoSMacParser(self.cleaner_mapping), SoSKeywordParser(self.cleaner_mapping, self.opts.keywords, self.opts.keyword_file), @@ -132,7 +139,7 @@ _loaded_name = _loaded.name.lower().split('parser')[0].strip() if _parser.lower().strip() == _loaded_name: self.log_info("Disabling parser: %s" % _loaded_name) - self.ui_log.warn( + self.ui_log.warning( "Disabling the '%s' parser. Be aware that this may " "leave sensitive plain-text data in the archive." % _parser @@ -316,6 +323,18 @@ if self.nested_archive: self.nested_archive.ui_name = self.nested_archive.description + def validate_parser_values(self): + """Check any values passed to the parsers via the commandline, e.g. + the --domains option, to ensure that they are valid for the parser in + question. + """ + for _dom in self.opts.domains: + if len(_dom.split('.')) < 2: + raise Exception( + f"Invalid value '{_dom}' given: --domains values must be " + "actual domains" + ) + def execute(self): """SoSCleaner will begin by inspecting the TARGET option to determine if it is a directory, archive, or archive of archives. 
@@ -381,22 +400,27 @@ cf.write(checksum) self.write_cleaner_log() - final_path = self.obfuscate_string( - os.path.join(self.sys_tmp, arc_path.split('/')[-1]) + final_path = os.path.join( + self.sys_tmp, + self.obfuscate_string(arc_path.split('/')[-1]) ) shutil.move(arc_path, final_path) arcstat = os.stat(final_path) - # logging will have been shutdown at this point - print("A mapping of obfuscated elements is available at\n\t%s" - % map_path) - - print("\nThe obfuscated archive is available at\n\t%s\n" % final_path) - print("\tSize\t%s" % get_human_readable(arcstat.st_size)) - print("\tOwner\t%s\n" % getpwuid(arcstat.st_uid).pw_name) + # while these messages won't be included in the log file in the archive + # some facilities, such as our avocado test suite, will sometimes not + # capture print() output, so leverage the ui_log to print to console + self.ui_log.info( + f"A mapping of obfuscated elements is available at\n\t{map_path}" + ) + self.ui_log.info( + f"\nThe obfuscated archive is available at\n\t{final_path}\n" + ) - print("Please send the obfuscated archive to your support " - "representative and keep the mapping file private") + self.ui_log.info(f"\tSize\t{get_human_readable(arcstat.st_size)}") + self.ui_log.info(f"\tOwner\t{getpwuid(arcstat.st_uid).pw_name}\n") + self.ui_log.info("Please send the obfuscated archive to your support " + "representative and keep the mapping file private") self.cleanup() @@ -433,7 +457,7 @@ _map = {} for parser in self.parsers: _map[parser.map_file_key] = {} - _map[parser.map_file_key].update(parser.mapping.dataset) + _map[parser.map_file_key].update(parser.get_map_contents()) return _map @@ -714,7 +738,7 @@ _skip.match(short_name) for _skip in _p.skip_patterns ) ] - with open(filename, 'r') as fname: + with open(filename, 'r', errors='replace') as fname: for line in fname: try: line, count = self.obfuscate_line(line, _parsers) diff -Nru sosreport-4.4/sos/cleaner/mappings/hostname_map.py 
sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/mappings/hostname_map.py --- sosreport-4.4/sos/cleaner/mappings/hostname_map.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/mappings/hostname_map.py 2023-05-26 22:32:49.000000000 +0530 @@ -147,7 +147,9 @@ if item in self.dataset: return self.dataset[item] if not self.domain_name_in_loaded_domains(item.lower()): - return item + # no match => return the original string with optional + # leading/trailing '.' or '_' characters + return ''.join([prefix, item, suffix]) if item.endswith(self.strip_exts): ext = '.' + item.split('.')[-1] item = item.replace(ext, '') diff -Nru sosreport-4.4/sos/cleaner/mappings/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/mappings/__init__.py --- sosreport-4.4/sos/cleaner/mappings/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/mappings/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -39,7 +39,7 @@ if not item or item in self.skip_keys or item in self.dataset.values(): return True for skip in self.ignore_matches: - if re.match(skip, item): + if re.match(skip, item, re.I): return True def add(self, item): @@ -94,7 +94,7 @@ :returns: A compiled regex pattern for the item :rtype: ``re.Pattern`` """ - return re.compile(item, re.I) + return re.compile(re.escape(item), re.I) def sanitize_item(self, item): """Perform the obfuscation relevant to the item being added to the map. diff -Nru sosreport-4.4/sos/cleaner/mappings/ipv6_map.py sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/mappings/ipv6_map.py --- sosreport-4.4/sos/cleaner/mappings/ipv6_map.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/mappings/ipv6_map.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,279 @@ +# Copyright 2022 Red Hat, Inc. 
Jake Hunsaker + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +import ipaddress + +from random import getrandbits +from sos.cleaner.mappings import SoSMap + + +def generate_hextets(hextets): + """Generate a random set of hextets, based on the length of the source + hextet. If any hextets are compressed, keep that compression. + + E.G. '::1234:bcd' will generate a leading empty '' hextet, followed by two + 4-character hextets. + + :param hextets: The extracted hextets from a source address + :type hextets: ``list`` + + :returns: A set of randomized hextets for use in an obfuscated + address + :rtype: ``list`` + """ + return [random_hex(4) if h else '' for h in hextets] + + +def random_hex(length): + """Generate a string of size length of random hex characters. + + :param length: The number of characters to generate + :type length: ``int`` + + :returns: A string of ``length`` hex characters + :rtype: ``str`` + """ + return f"{getrandbits(4*length):0{length}x}" + + +class SoSIPv6Map(SoSMap): + """Mapping for IPv6 addresses and networks. + + Much like the IP map handles IPv4 addresses, this map is designed to take + IPv6 strings and obfuscate them consistently to maintain network topology. + To do this, addresses will be manipulated by the ipaddress library. + + If an IPv6 address is encountered without a netmask, it is assumed to be a + /64 address. 
+ """ + + networks = {} + + ignore_matches = [ + r'^::1/.*', + r'::/0', + r'fd53:.*', + r'^53..:' + ] + + first_hexes = ['534f'] + + compile_regexes = False + version = 1 + + def conf_update(self, config): + """Override the base conf_update() so that we can load the existing + networks into ObfuscatedIPv6Network() objects for the current run. + """ + if 'networks' not in config: + return + for network in config['networks']: + _orig = ipaddress.ip_network(network) + _obfuscated = config['networks'][network]['obfuscated'] + _net = self._get_network(_orig, _obfuscated) + self.dataset[_net.original_address] = _net.obfuscated_address + for host in config['networks'][network]['hosts']: + _ob_host = config['networks'][network]['hosts'][host] + _net.add_obfuscated_host_address(host, _ob_host) + self.dataset[host] = _ob_host + + def sanitize_item(self, ipaddr): + _prefix = ipaddr.split('/')[-1] if '/' in ipaddr else '' + _ipaddr = ipaddr + if not _prefix: + # assume a /64 default per protocol + _ipaddr += "/64" + try: + _addr = ipaddress.ip_network(_ipaddr) + # ipaddr was an actual network per protocol + _net = self._get_network(_addr) + _ipaddr = _net.obfuscated_address + except ValueError: + # A ValueError is raised from the ipaddress module when passing + # an address such as 2620:52:0:2d80::4fe/64, which has host bits + # '::4fe' set - the /64 is generally interpreted only for network + # addresses. 
We use this behavior to properly obfuscate the network + # before obfuscating a host address within that network + _addr = ipaddress.ip_network(_ipaddr, strict=False) + _net = self._get_network(_addr) + if _net.network_addr not in self.dataset: + self.dataset[_net.original_address] = _net.obfuscated_address + # then, get the address within the network + _hostaddr = ipaddress.ip_address(_ipaddr.split('/')[0]) + _ipaddr = _net.obfuscate_host_address(_hostaddr) + + if _prefix and '/' not in _ipaddr: + return f"{_ipaddr}/{_prefix}" + return _ipaddr + + def _get_network(self, address, obfuscated=''): + """Attempt to find an existing ObfuscatedIPv6Network object from which + to either find an existing obfuscated match, or create a new one. If + no such object already exists, create it. + """ + _addr = address.compressed + if _addr not in self.networks: + self.networks[_addr] = ObfuscatedIPv6Network(address, obfuscated, + self.first_hexes) + return self.networks[_addr] + + +class ObfuscatedIPv6Network(): + """An abstraction class that represents a network that is (to be) handled + by sos. + + Each distinct IPv6 network that we encounter will have a representative + instance of this class, from which new obfuscated subnets and host + addresses will be generated. + + This class should be built from an ``ipaddress.IPv6Network`` object. If + an obfuscation string is not passed, one will be created during init. + """ + + def __init__(self, addr, obfuscation='', used_hexes=None): + """Basic setup for the obfuscated network. Minor validation on the addr + used to create the instance, as well as on an optional ``obfuscation`` + which if set, will serve as the obfuscated_network address. 
+ + :param addr: The *un*obfuscated network to be handled + :type addr: ``ipaddress.IPv6Network`` + + :param obfuscation: An optional pre-determined string representation of + the obfuscated network address + :type obfuscation: ``str`` + + :param used_hexes: A list of already used hexes for the first hextet + of a potential global address obfuscation + :type used_hexes: ``list`` + """ + if not isinstance(addr, ipaddress.IPv6Network): + raise Exception('Invalid network: not an IPv6Network object') + self.addr = addr + self.prefix = addr.prefixlen + self.network_addr = addr.network_address.compressed + self.hosts = {} + if used_hexes is None: + self.first_hexes = ['534f'] + else: + self.first_hexes = used_hexes + if not obfuscation: + self._obfuscated_network = self._obfuscate_network_address() + else: + if not isinstance(obfuscation, str): + raise TypeError(f"Pre-determined obfuscated network address " + f"must be str, not {type(obfuscation)}") + self._obfuscated_network = obfuscation.split('/')[0] + + @property + def obfuscated_address(self): + return f"{self._obfuscated_network}/{self.prefix}" + + @property + def original_address(self): + return self.addr.compressed + + def _obfuscate_network_address(self): + """Generate the obfuscated pair for the network address. This is + determined based on the netmask of the network this class was built + on top of. + """ + if self.addr.is_global: + return self._obfuscate_global_address() + elif self.addr.is_link_local: + # link-local addresses are always fe80::/64. This is not sensitive + # in itself, and retaining the information that an address is a + # link-local address is important for problem analysis, so don't + # obfuscate this network information. + return self.network_addr + elif self.addr.is_private: + return self._obfuscate_private_address() + return self.network_addr + + def _obfuscate_global_address(self): + """Global unicast addresses have a 48-bit global routing prefix and a + 16-bit subnet. 
We set the global routing prefix to a static + sos-specific identifier that could never be seen in the wild, + '534f:' + + We then randomize the subnet hextet. + """ + _hextets = self.network_addr.split(':')[1:] + _ob_hex = ['534f'] + if all(not c for c in _hextets): + # we have only a single defined hextet, e.g. ff00::/64, so we need + # to not use the standard first-hex identifier or we'll overlap + # every similar address obfuscation. + # Set the leading bits to 53, but increment upwards from there for + # when we exceed 256 networks obfuscated in this manner. + _start = 53 + (len(self.first_hexes) // 256) + _ob_hex = f"{_start}{random_hex(2)}" + while _ob_hex in self.first_hexes: + # prevent duplicates + _ob_hex = f"{_start}{random_hex(2)}" + self.first_hexes.append(_ob_hex) + _ob_hex = [_ob_hex] + _ob_hex.extend(generate_hextets(_hextets)) + return ':'.join(_ob_hex) + + def _obfuscate_private_address(self): + """The first 8 bits will always be 'fd', the next 40 bits are meant + to be a global ID, followed by 16 bits for the subnet. To keep things + relatively simply we maintain the first hextet as 'fd53', and then + randomize any remaining hextets + """ + _hextets = self.network_addr.split(':')[1:] + _ob_hex = ['fd53'] + _ob_hex.extend(generate_hextets(_hextets)) + return ':'.join(_ob_hex) + + def obfuscate_host_address(self, addr): + """Given an unobfuscated address, generate an obfuscated match for it, + and save it to this network for tracking during the execution of clean. + + Note: another way to do this would be to convert the obfuscated network + to bytes, and add a random amount to that based on the number of + addresses that the network can support and from that new bytes count + craft a new IPv6 address. 
This has the advantage of absolutely + guaranteeing the new address is within the network space (whereas the + method employed below could *theoretically* generate an overlapping + address), but would in turn remove any ability to compress obfuscated + addresses to match the general format/syntax of the address it is + replacing. For the moment, it is assumed that being able to maintain a + quick mental note of "unobfuscated device ff00::1 is obfuscated device + 53ad::a1b2" is more desireable than "ff00::1 is now obfuscated as + 53ad::1234:abcd:9876:a1b2:". + + :param addr: The unobfuscated IPv6 address + :type addr: ``ipaddress.IPv6Address`` + + :returns: An obfuscated address within this network + :rtype: ``str`` + """ + def _generate_address(): + return ''.join([ + self._obfuscated_network, + ':'.join(generate_hextets(_host.split(':'))) + ]) + + if addr.compressed not in self.hosts: + # separate host from the address by removing its network prefix + _n = self.network_addr.rstrip(':') + _host = addr.compressed[len(_n):].lstrip(':') + _ob_host = _generate_address() + while _ob_host in self.hosts.values(): + _ob_host = _generate_address() + self.add_obfuscated_host_address(addr.compressed, _ob_host) + return self.hosts[addr.compressed] + + def add_obfuscated_host_address(self, host, obfuscated): + """Adds an obfuscated pair to the class for tracking and ongoing + consistency in obfuscation. + """ + self.hosts[host] = obfuscated diff -Nru sosreport-4.4/sos/cleaner/parsers/hostname_parser.py sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/parsers/hostname_parser.py --- sosreport-4.4/sos/cleaner/parsers/hostname_parser.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/parsers/hostname_parser.py 2023-05-26 22:32:49.000000000 +0530 @@ -8,6 +8,7 @@ # # See the LICENSE file in the source distribution for further information. 
+import re from sos.cleaner.parsers import SoSCleanerParser from sos.cleaner.mappings.hostname_map import SoSHostnameMap @@ -29,6 +30,24 @@ self.load_short_names_from_mapping() self.mapping.set_initial_counts() + def parse_line(self, line): + """This will be called for every line in every file we process, so that + every parser has a chance to scrub everything. + + We are overriding parent method since we need to swap ordering of + _parse_line_with_compiled_regexes and _parse_line calls. + """ + count = 0 + for skip_pattern in self.skip_line_patterns: + if re.match(skip_pattern, line, re.I): + return line, count + line, _count = self._parse_line(line) + count += _count + if self.compile_regexes: + line, _rcount = self._parse_line_with_compiled_regexes(line) + count += _rcount + return line, count + def load_short_names_from_mapping(self): """When we load the mapping file into the hostname map, we have to do some dancing to get those loaded properly into the "intermediate" dicts diff -Nru sosreport-4.4/sos/cleaner/parsers/ipv6_parser.py sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/parsers/ipv6_parser.py --- sosreport-4.4/sos/cleaner/parsers/ipv6_parser.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/cleaner/parsers/ipv6_parser.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,60 @@ +# Copyright 2022 Red Hat, Inc. Jake Hunsaker + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos.cleaner.parsers import SoSCleanerParser +from sos.cleaner.mappings.ipv6_map import SoSIPv6Map + + +class SoSIPv6Parser(SoSCleanerParser): + """Parser for handling IPv6 networks and addresses""" + + name = 'IPv6 Parser' + map_file_key = 'ipv6_map' + regex_patterns = [ + # Attention: note that this is a single long regex, not several entries + # This is initially based off of two regexes from the Java library + # for validating an IPv6 string. However, this is modified to begin and + # end with a negative lookbehind to ensure that a substring of 'ed::' + # is not extracted from a log message such as 'SomeFuncUsed::ADiffFunc' + # that come components may log with. Further, we optionally try to grab + # a trailing prefix for the network bits. + r"(? + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +import json + +from sos.collector.clusters import Cluster + + +class ceph(Cluster): + """ + This cluster profile is for Ceph Storage clusters, and is primarily + built around Red Hat Ceph Storage 5. Nodes are enumerated via `cephadm`; if + your Ceph deployment uses cephadm but is not RHCS 5, this profile may work + as intended, but it is not currently guaranteed to do so. If you are using + such an environment and this profile does not work for you, please file a + bug report detailing what is failing. + + By default, all nodes in the cluster will be returned for collection. This + may not be desirable, so users are encouraged to use the `labels` option + to specify a colon-delimited set of ceph node labels to restrict the list + of nodes to. + + For example, using `-c ceph.labels=osd:mgr` will return only nodes labeled + with *either* `osd` or `mgr`. 
+ """ + + cluster_name = 'Ceph Storage Cluster' + sos_plugins = [ + 'ceph_common', + ] + sos_options = {'log-size': 50} + packages = ('cephadm',) + option_list = [ + ('labels', '', 'Colon delimited list of labels to select nodes with') + ] + + def get_nodes(self): + self.nodes = [] + ceph_out = self.exec_primary_cmd( + 'cephadm shell -- ceph orch host ls --format json', + need_root=True + ) + + if not ceph_out['status'] == 0: + self.log_error( + f"Could not enumerate nodes via cephadm: {ceph_out['output']}" + ) + return self.nodes + + nodes = json.loads(ceph_out['output'].splitlines()[-1]) + _labels = [lab for lab in self.get_option('labels').split(':') if lab] + for node in nodes: + if _labels and not any(_l in node['labels'] for _l in _labels): + self.log_debug(f"{node} filtered from list due to labels") + continue + self.nodes.append(node['hostname']) + + return self.nodes + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/collector/clusters/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/collector/clusters/__init__.py --- sosreport-4.4/sos/collector/clusters/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/collector/clusters/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -41,6 +41,9 @@ :cvar sos_plugins: Which plugins to forcibly enable for node reports :vartype sos_plugins: ``list`` + :cvar sos_options: Options to pass to report on every node + :vartype sos_options: ``dict`` + :cvar sos_plugin_options: Plugin options to forcibly set for nodes :vartype sos_plugin_options: ``dict`` @@ -54,6 +57,7 @@ option_list = [] packages = ('',) sos_plugins = [] + sos_options = {} sos_plugin_options = {} sos_preset = '' cluster_name = None @@ -116,6 +120,10 @@ newline=False ) + if cls.sos_options: + _opts = ', '.join(f'--{k} {v}' for k, v in cls.sos_options.items()) + section.add_text(f"Sets the following sos options: {_opts}") + if cls.sos_plugins: section.add_text( "Enables the following plugins: %s" @@ -212,7 +220,7 @@ def 
log_warn(self, msg): """Used to print warning messages""" - self.soslog.warn(self._fmt_msg(msg)) + self.soslog.warning(self._fmt_msg(msg)) def get_option(self, option): """ @@ -392,13 +400,14 @@ """ try: nodes = self.get_nodes() - except Exception as e: - self.log_error('Cluster failed to enumerate nodes: %s' % e) - raise + except Exception as err: + raise Exception(f"Cluster failed to enumerate nodes: {err}") if isinstance(nodes, list): node_list = [n.strip() for n in nodes if n] elif isinstance(nodes, str): node_list = [n.split(',').strip() for n in nodes] + else: + raise Exception(f"Cluster returned unexpected node list: {nodes}") node_list = list(set(node_list)) for node in node_list: if node.startswith(('-', '_', '(', ')', '[', ']', '/', '\\')): diff -Nru sosreport-4.4/sos/collector/clusters/juju.py sosreport-4.5.4ubuntu0.20.04.1/sos/collector/clusters/juju.py --- sosreport-4.4/sos/collector/clusters/juju.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/collector/clusters/juju.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,230 @@ +# Copyright (c) 2023 Canonical Ltd., Chi Wai Chan + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +import logging +import json +import re + +from sos.collector.clusters import Cluster + + +def _parse_option_string(strings=None): + """Parse comma separated string.""" + if not strings: + return [] + return [string.strip() for string in strings.split(",")] + + +def _get_index(model_name): + """Helper function to get Index. + + The reason why we need Index defined in function is because currently + the collector.__init__ will load all the classes in this module + and also Index. 
This will cause a bug because it thinks Index is a + Cluster type. Also, we don't want to provide a customized + filter to remove Index class. + """ + + class Index: + """Index structure to help parse juju status output. + + Attributes apps, units and machines are dict which key + is the app/unit/machine name + and the value is list of targets which format are + {model_name}:{machine_id}. + """ + + def __init__(self, model_name): + self.model_name: str = model_name + self.apps = {} + self.units = {} + self.machines = {} + self.ui_log = logging.getLogger("sos") + + def add_principals(self, juju_status): + """Adds principal units to index.""" + for app, app_info in juju_status["applications"].items(): + nodes = [] + units = app_info.get("units", {}) + for unit, unit_info in units.items(): + machine = unit_info["machine"] + node = f"{self.model_name}:{machine}" + self.units[unit] = [node] + self.machines[machine] = [node] + nodes.append(node) + + self.apps[app] = nodes + + def add_subordinates(self, juju_status): + """Add subordinates to index. + + Since subordinates do not have units they need to be + manually added. + """ + for app, app_info in juju_status["applications"].items(): + subordinate_to = app_info.get("subordinate-to", []) + for parent in subordinate_to: + # If parent is missing + if not self.apps.get(parent): + self.ui_log.warning( + f"Principal charm {parent} is missing" + ) + continue + self.apps[app].extend(self.apps[parent]) + + # If parent's units is missing + if "units" not in juju_status["applications"][parent]: + self.ui_log.warning( + f"Principal charm {parent} is missing units" + ) + continue + + units = juju_status["applications"][parent]["units"] + for unit, unit_info in units.items(): + node = f"{self.model_name}:{unit_info['machine']}" + for sub_key, sub_value in unit_info.get( + "subordinates", {} + ).items(): + if sub_key.startswith(app + "/"): + self.units[sub_key] = [node] + + def add_machines(self, juju_status): + """Add machines to index. 
+ + If model does not have any applications it needs to be + manually added. + """ + for machine in juju_status["machines"].keys(): + node = f"{self.model_name}:{machine}" + self.machines[machine] = [node] + + return Index(model_name) + + +class juju(Cluster): + """ + The juju cluster profile is intended to be used on juju managed clouds. + It's assumed that `juju` is installed on the machine where `sos` is called, + and that the juju user has superuser privilege to the current controller. + + By default, the sos reports will be collected from all the applications in + the current model. If necessary, you can filter the nodes by models / + applications / units / machines with cluster options. + + Example: + + sos collect --cluster-type juju -c "juju.models=sos" -c "juju.apps=a,b,c" + + """ + + cmd = "juju" + cluster_name = "Juju Managed Clouds" + option_list = [ + ("apps", "", "Filter node list by apps (comma separated regex)."), + ("units", "", "Filter node list by units (comma separated string)."), + ("models", "", "Filter node list by models (comma separated string)."), + ( + "machines", + "", + "Filter node list by machines (comma separated string).", + ), + ] + + def _cleanup_juju_output(self, output): + """Remove leading characters before {.""" + return re.sub(r"(^[^{]*)(.*)", "\\2", output, 0, re.MULTILINE) + + def _get_model_info(self, model_name): + """Parse juju status output and return target dict. + + Here are a couple of helper functions to parse the juju principal units, + subordinate units and machines. 
+ """ + juju_status = self._execute_juju_status(model_name) + + index = _get_index(model_name=model_name) + index.add_principals(juju_status) + index.add_subordinates(juju_status) + index.add_machines(juju_status) + + return index + + def _execute_juju_status(self, model_name): + model_option = f"-m {model_name}" if model_name else "" + format_option = "--format json" + status_cmd = f"{self.cmd} status {model_option} {format_option}" + res = self.exec_primary_cmd(status_cmd) + if not res["status"] == 0: + raise Exception(f"'{status_cmd}' returned error: {res['status']}") + juju_json_output = self._cleanup_juju_output((res["output"])) + + juju_status = None + try: + juju_status = json.loads(juju_json_output) + except json.JSONDecodeError: + raise Exception( + "Juju output is not valid json format." + f"Output: {juju_json_output}" + ) + return juju_status + + def _filter_by_pattern(self, key, patterns, model_info): + """Filter with regex match.""" + nodes = set() + for pattern in patterns: + for param, value in getattr(model_info, key).items(): + if re.match(pattern, param): + nodes.update(value or []) + return nodes + + def _filter_by_fixed(self, key, patterns, model_info): + """Filter with fixed match.""" + nodes = set() + for pattern in patterns: + for param, value in getattr(model_info, key).items(): + if pattern == param: + nodes.update(value or []) + return nodes + + def set_transport_type(self): + """Dynamically change transport to 'juju'.""" + return "juju" + + def get_nodes(self): + """Get the machine numbers from `juju status`.""" + models = _parse_option_string(self.get_option("models")) + apps = _parse_option_string(self.get_option("apps")) + units = _parse_option_string(self.get_option("units")) + machines = _parse_option_string(self.get_option("machines")) + filters = {"apps": apps, "units": units, "machines": machines} + + # Return empty nodes if no model and filter provided. 
+ if not any(filters.values()) and not models: + return [] + + if not models: + models = [""] # use current model by default + + nodes = set() + + for model in models: + model_info = self._get_model_info(model) + for key, resource in filters.items(): + # Filter node by different policies + if key == "apps": + _nodes = self._filter_by_pattern(key, resource, model_info) + else: + _nodes = self._filter_by_fixed(key, resource, model_info) + nodes.update(_nodes) + + return list(nodes) + + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/collector/clusters/ocp.py sosreport-4.5.4ubuntu0.20.04.1/sos/collector/clusters/ocp.py --- sosreport-4.4/sos/collector/clusters/ocp.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/collector/clusters/ocp.py 2023-05-26 22:32:49.000000000 +0530 @@ -86,7 +86,7 @@ _oc_path['output'].strip().lstrip('/') ) else: - self.log_warn( + self.log_warning( "Unable to to determine PATH for 'oc' command, " "node enumeration may fail." ) @@ -142,12 +142,32 @@ self.fmt_oc_cmd("new-project %s" % self.project) ) if ret['status'] == 0: + self._label_sos_project() return True self.log_debug("Failed to create project: %s" % ret['output']) raise Exception("Failed to create temporary project for collection. " "\nAborting...") + def _label_sos_project(self): + """Add pertinent labels to the temporary project we've created so that + our privileged containers can properly run. 
+ """ + labels = [ + "security.openshift.io/scc.podSecurityLabelSync=false", + "pod-security.kubernetes.io/enforce=privileged" + ] + for label in labels: + ret = self.exec_primary_cmd( + self.fmt_oc_cmd( + f"label namespace {self.project} {label} --overwrite" + ) + ) + if not ret['status'] == 0: + raise Exception( + f"Error applying namespace labels: {ret['output']}" + ) + def cleanup(self): """Remove the project we created to execute within """ @@ -231,8 +251,9 @@ for node_name, node in self.node_dict.items(): if roles: for role in roles: - if role == node['roles']: + if role in node['roles']: nodes.append(node_name) + break else: nodes.append(node_name) else: diff -Nru sosreport-4.4/sos/collector/clusters/pacemaker.py sosreport-4.5.4ubuntu0.20.04.1/sos/collector/clusters/pacemaker.py --- sosreport-4.4/sos/collector/clusters/pacemaker.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/collector/clusters/pacemaker.py 2023-05-26 22:32:49.000000000 +0530 @@ -11,7 +11,7 @@ import re from sos.collector.clusters import Cluster -from setuptools._vendor.packaging import version +from sos.utilities import parse_version from xml.etree import ElementTree @@ -63,7 +63,7 @@ _ver = self.exec_primary_cmd('crm_mon --version') if _ver['status'] == 0: cver = _ver['output'].split()[1].split('-')[0] - if not version.parse(cver) > version.parse('2.0.3'): + if not parse_version(cver) > parse_version('2.0.3'): xmlopt = '--as-xml' else: return diff -Nru sosreport-4.4/sos/collector/exceptions.py sosreport-4.5.4ubuntu0.20.04.1/sos/collector/exceptions.py --- sosreport-4.4/sos/collector/exceptions.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/collector/exceptions.py 2023-05-26 22:32:49.000000000 +0530 @@ -104,6 +104,25 @@ super(InvalidTransportException, self).__init__(message) +class SaltStackMasterUnsupportedException(Exception): + """Raised when SaltStack Master is unsupported locally""" + + def __init__(self): + message = 
'Master unsupported by local SaltStack installation' + super(SaltStackMasterUnsupportedException, self).__init__(message) + + +class JujuNotInstalledException(Exception): + """Raised when juju is not installed locally""" + + def __init__(self): + message = ( + 'Juju is not installed, ' + 'please ensure you have installed juju.' + ) + super(JujuNotInstalledException, self).__init__(message) + + __all__ = [ 'AuthPermissionDeniedException', 'CommandTimeoutException', @@ -113,7 +132,9 @@ 'ControlSocketMissingException', 'InvalidPasswordException', 'PasswordRequestException', + 'SaltStackMasterUnsupportedException', 'TimeoutPasswordAuthException', 'UnsupportedHostException', - 'InvalidTransportException' + 'InvalidTransportException', + 'JujuNotInstalledException' ] diff -Nru sosreport-4.4/sos/collector/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/collector/__init__.py --- sosreport-4.4/sos/collector/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/collector/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -88,12 +88,14 @@ 'image': '', 'force_pull_image': True, 'jobs': 4, + 'journal_size': 0, 'keywords': [], 'keyword_file': None, 'keep_binary_files': False, 'label': '', 'list_options': False, 'log_size': 0, + 'low_priority': False, 'map_file': '/etc/sos/cleaner/default_mapping', 'primary': '', 'namespaces': None, @@ -118,7 +120,6 @@ 'skip_commands': [], 'skip_files': [], 'skip_plugins': [], - 'sos_opt_line': '', 'ssh_key': '', 'ssh_port': 22, 'ssh_user': 'root', @@ -301,11 +302,16 @@ "collections. 
'auto' for policy control.") sos_grp.add_argument('-e', '--enable-plugins', action="extend", help='Enable specific plugins for sosreport') + sos_grp.add_argument('--journal-size', type=int, default=0, + help='Limit the size of journals in MiB') sos_grp.add_argument('-k', '--plugin-option', '--plugopts', action="extend", dest='plugopts', help='Plugin option as plugname.option=value') sos_grp.add_argument('--log-size', default=0, type=int, - help='Limit the size of individual logs (in MiB)') + help='Limit the size of individual logs ' + '(not journals) in MiB') + sos_grp.add_argument('--low-priority', action='store_true', + default=False, help='Run reports as low priority') sos_grp.add_argument('-n', '--skip-plugins', action="extend", help='Skip these plugins') sos_grp.add_argument('-o', '--only-plugins', action="extend", @@ -404,9 +410,6 @@ help='Prompt for password for each node') collect_grp.add_argument('--preset', default='', required=False, help='Specify a sos preset to use') - collect_grp.add_argument('--sos-cmd', dest='sos_opt_line', - help=('Manually specify the commandline ' - 'for sos report on nodes')) collect_grp.add_argument('--ssh-user', help='Specify an SSH user. 
Default root') collect_grp.add_argument('--timeout', type=int, required=False, @@ -602,7 +605,7 @@ def log_warn(self, msg): """Log warn messages to both console and log file""" - self.soslog.warn(msg) + self.soslog.warning(msg) def log_error(self, msg): """Log error messages to both console and log file""" @@ -755,7 +758,7 @@ fname = os.path.join(group_path, cfg['name']) with open(fname, 'w') as hf: json.dump(cfg, hf) - os.chmod(fname, 0o644) + os.chmod(fname, 0o600) return fname def prep(self): @@ -918,7 +921,8 @@ self.ui_log.info('The following is a list of nodes to collect from:') if self.primary.connected and self.primary.hostname is not None: - if not (self.primary.local and self.opts.no_local): + if not ((self.primary.local and self.opts.no_local) + or self.cluster.strict_node_list): self.ui_log.info('\t%-*s' % (self.commons['hostlen'], self.primary.hostname)) @@ -938,42 +942,34 @@ def configure_sos_cmd(self): """Configures the sosreport command that is run on the nodes""" - self.sos_cmd = 'sosreport --batch ' - if self.opts.sos_opt_line: - filt = ['&', '|', '>', '<', ';'] - if any(f in self.opts.sos_opt_line for f in filt): - self.log_warn('Possible shell script found in provided sos ' - 'command. Ignoring --sos-opt-line entirely.') - self.opts.sos_opt_line = None - else: - self.sos_cmd = '%s %s' % ( - self.sos_cmd, quote(self.opts.sos_opt_line)) - self.log_debug("User specified manual sosreport command. 
" - "Command set to %s" % self.sos_cmd) - return True + sos_cmd = 'sosreport --batch ' - sos_opts = [] + sos_options = {} if self.opts.case_id: - sos_opts.append('--case-id=%s' % (quote(self.opts.case_id))) + sos_options['case-id'] = quote(self.opts.case_id) if self.opts.alloptions: - sos_opts.append('--alloptions') + sos_options['alloptions'] = '' if self.opts.all_logs: - sos_opts.append('--all-logs') + sos_options['all-logs'] = '' if self.opts.verify: - sos_opts.append('--verify') + sos_options['verify'] = '' if self.opts.log_size: - sos_opts.append(('--log-size=%s' % quote(str(self.opts.log_size)))) + sos_options['log-size'] = quote(str(self.opts.log_size)) if self.opts.sysroot: - sos_opts.append('-s %s' % quote(self.opts.sysroot)) + sos_options['sysroot'] = quote(self.opts.sysroot) if self.opts.chroot: - sos_opts.append('-c %s' % quote(self.opts.chroot)) + sos_options['chroot'] = quote(self.opts.chroot) if self.opts.compression_type != 'auto': - sos_opts.append('-z %s' % (quote(self.opts.compression_type))) - self.sos_cmd = self.sos_cmd + ' '.join(sos_opts) - self.log_debug("Initial sos cmd set to %s" % self.sos_cmd) - self.commons['sos_cmd'] = self.sos_cmd - self.collect_md.add_field('initial_sos_cmd', self.sos_cmd) + sos_options['compression-type'] = quote(self.opts.compression_type) + + for k, v in sos_options.items(): + sos_cmd += f"--{k} {v} " + sos_cmd = sos_cmd.rstrip() + self.log_debug(f"Initial sos cmd set to {sos_cmd}") + self.commons['sos_cmd'] = 'sosreport --batch ' + self.commons['sos_options'] = sos_options + self.collect_md.add_field('initial_sos_cmd', sos_cmd) def connect_to_primary(self): """If run with --primary, we will run cluster checks again that @@ -1038,12 +1034,13 @@ if applicable""" if (self.hostname in self.node_list and self.opts.no_local): self.node_list.remove(self.hostname) - for i in self.ip_addrs: - if i in self.node_list: - self.node_list.remove(i) + if not self.cluster.strict_node_list: + for i in self.ip_addrs: + if i in 
self.node_list: + self.node_list.remove(i) # remove the primary node from the list, since we already have # an open session to it. - if self.primary is not None: + if self.primary is not None and not self.cluster.strict_node_list: for n in self.node_list: if n == self.primary.hostname or n == self.opts.primary: self.node_list.remove(n) @@ -1189,11 +1186,18 @@ def collect(self): """ For each node, start a collection thread and then tar all collected sosreports """ - if self.primary.connected: + filters = set([self.primary.address, self.primary.hostname]) + # add primary if: + # - we are connected to it and + # - its hostname is in node_list, or + # - we dont forcibly remove local host from collection + # (i.e. strict_node_list=False) + if self.primary.connected and \ + (filters.intersection(set(self.node_list)) or + not self.cluster.strict_node_list): self.client_list.append(self.primary) self.ui_log.info("\nConnecting to nodes...") - filters = [self.primary.address, self.primary.hostname] nodes = [(n, None) for n in self.node_list if n not in filters] if self.opts.password_per_node: diff -Nru sosreport-4.4/sos/collector/sosnode.py sosreport-4.5.4ubuntu0.20.04.1/sos/collector/sosnode.py --- sosreport-4.4/sos/collector/sosnode.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/collector/sosnode.py 2023-05-26 22:32:49.000000000 +0530 @@ -14,22 +14,26 @@ import os import re -from distutils.version import LooseVersion from pipes import quote from sos.policies import load from sos.policies.init_systems import InitSystem +from sos.collector.transports.juju import JujuSSH from sos.collector.transports.control_persist import SSHControlPersist from sos.collector.transports.local import LocalTransport from sos.collector.transports.oc import OCTransport +from sos.collector.transports.saltstack import SaltStackMaster from sos.collector.exceptions import (CommandTimeoutException, ConnectionException, UnsupportedHostException, InvalidTransportException) 
+from sos.utilities import parse_version TRANSPORTS = { 'local': LocalTransport, 'control_persist': SSHControlPersist, - 'oc': OCTransport + 'oc': OCTransport, + 'saltstack': SaltStackMaster, + 'juju': JujuSSH, } @@ -44,6 +48,7 @@ self.tmpdir = commons['tmpdir'] self.hostlen = commons['hostlen'] self.need_sudo = commons['need_sudo'] + self.sos_options = commons['sos_options'] self.local = False self.host = None self.cluster = None @@ -72,6 +77,10 @@ self.soslog = logging.getLogger('sos') self.ui_log = logging.getLogger('sos_ui') self._transport = self._load_remote_transport(commons) + # Overwrite need_sudo if transports default_user + # is set and is not root. + if self._transport.default_user: + self.need_sudo = self._transport.default_user != 'root' try: self._transport.connect(self._password) except Exception as err: @@ -294,7 +303,7 @@ if ver: if len(ver.split('.')) == 2: # safeguard against maintenance releases throwing off the - # comparison by LooseVersion + # comparison by parse_version ver += '.0' try: ver += '-%s' % rel.split('.')[0] @@ -382,7 +391,7 @@ return self.commons['policy'] host = load(cache={}, sysroot=self.opts.sysroot, init=InitSystem(), probe_runtime=True, - remote_exec=self._transport.remote_exec, + remote_exec=self._transport.run_command, remote_check=self.read_file('/etc/os-release')) if host: self.log_info("loaded policy %s for host" % host.distro) @@ -419,8 +428,8 @@ _ver = _format_version(ver) try: - _node_ver = LooseVersion(self.sos_info['version']) - _test_ver = LooseVersion(_ver) + _node_ver = parse_version(self.sos_info['version']) + _test_ver = parse_version(_ver) return _node_ver >= _test_ver except Exception as err: self.log_error("Error checking sos version: %s" % err) @@ -550,6 +559,12 @@ if plug not in self.enable_plugins: self.enable_plugins.append(plug) + if self.cluster.sos_options: + for opt in self.cluster.sos_options: + # take the user specification over any cluster defaults + if opt not in self.sos_options: + 
self.sos_options[opt] = self.cluster.sos_options[opt] + if self.cluster.sos_plugin_options: for opt in self.cluster.sos_plugin_options: if not any(opt in o for o in self.plugopts): @@ -584,9 +599,6 @@ if label: sos_cmd = '%s %s ' % (sos_cmd, quote(label)) - if self.opts.sos_opt_line: - return '%s %s' % (sos_cmd, self.opts.sos_opt_line) - sos_opts = [] # sos-3.6 added --threads @@ -639,6 +651,12 @@ "--namespaces=%s" % self.opts.namespaces ) + if self.check_sos_version('4.5.2'): + if self.opts.journal_size: + sos_opts.append(f"--journal-size={self.opts.journal_size}") + if self.opts.low_priority: + sos_opts.append('--low-priority') + self.update_cmd_from_cluster() sos_cmd = sos_cmd.replace( @@ -646,6 +664,10 @@ os.path.join(self.host.sos_bin_path, self.sos_bin) ) + for opt in self.sos_options: + _val = self.sos_options[opt] + sos_opts.append(f"--{opt} {_val if _val else ''}") + if self.plugopts: opts = [o for o in self.plugopts if self._plugin_exists(o.split('.')[0]) diff -Nru sosreport-4.4/sos/collector/transports/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/collector/transports/__init__.py --- sosreport-4.4/sos/collector/transports/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/collector/transports/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -29,6 +29,7 @@ """ name = 'undefined' + default_user = None def __init__(self, address, commons): self.address = address diff -Nru sosreport-4.4/sos/collector/transports/juju.py sosreport-4.5.4ubuntu0.20.04.1/sos/collector/transports/juju.py --- sosreport-4.4/sos/collector/transports/juju.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/collector/transports/juju.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,84 @@ +# Copyright (c) 2023 Canonical Ltd., Chi Wai Chan + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it 
subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + + +import subprocess + +from sos.collector.exceptions import JujuNotInstalledException +from sos.collector.transports import RemoteTransport +from sos.utilities import sos_get_command_output + + +class JujuSSH(RemoteTransport): + """ + A "transport" that leverages `juju ssh` to perform commands on the remote + hosts. + + This transport is expected to be used in juju managed environment, and the + user should have the necessary credential for accessing the controller. + When using this transport, the --nodes option will be expected to be a + comma separated machine IDs, **not** IP addr, since `juju ssh` identifies + the ssh target by machine ID. + + Examples: + + sos collect --nodes 0,1,2 --no-local --transport juju --batch + + """ + + name = "juju_ssh" + default_user = "ubuntu" + + def _check_juju_installed(self): + cmd = "juju version" + try: + subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) + except subprocess.CalledProcessError: + self.log_error("Failed to check `juju` version") + raise JujuNotInstalledException + return True + + def _chmod(self, fname): + cmd = f"{self.remote_exec} sudo chmod o+r {fname}" + try: + subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) + except subprocess.CalledProcessError: + self.log_error(f"Failed to make {fname} world-readable") + raise + return True + + def _connect(self, password=""): + self._connected = self._check_juju_installed() + return self._connected + + def _disconnect(self): + return True + + @property + def connected(self): + return self._connected + + @property + def remote_exec(self): + model, target_option = self.address.split(":") + model_option = f"-m {model}" if model else "" + option = f"{model_option} {target_option}" + return f"juju ssh {option}" + + def _retrieve_file(self, fname, dest): + self._chmod(fname) 
# juju scp needs the archive to be world-readable + model, unit = self.address.split(":") + model_option = f"-m {model}" if model else "" + cmd = f"juju scp {model_option} -- -r {unit}:{fname} {dest}" + res = sos_get_command_output(cmd) + return res["status"] == 0 + + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/collector/transports/saltstack.py sosreport-4.5.4ubuntu0.20.04.1/sos/collector/transports/saltstack.py --- sosreport-4.4/sos/collector/transports/saltstack.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/collector/transports/saltstack.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,136 @@ +# Copyright Red Hat 2022, Trevor Benson + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +import contextlib +import json +import os +import shutil +from sos.collector.transports import RemoteTransport +from sos.collector.exceptions import (ConnectionException, + SaltStackMasterUnsupportedException) +from sos.utilities import (is_executable, + sos_get_command_output) + + +class SaltStackMaster(RemoteTransport): + """ + A transport for collect that leverages SaltStack's Master Pub/Sub + functionality to send commands to minions. + + This transport will by default assume the use cmd.shell module to + execute commands on the minions. + """ + + name = 'saltstack' + + def _convert_output_json(self, json_output): + return list(json.loads(json_output).values())[0] + + def run_command( + self, cmd, timeout=180, need_root=False, env=None, get_pty=False): + """ + Run a command on the remote host using SaltStack Master. + If the output is json, convert it to a string. 
+ """ + ret = super(SaltStackMaster, self).run_command( + cmd, timeout, need_root, env, get_pty) + with contextlib.suppress(Exception): + ret['output'] = self._convert_output_json(ret['output']) + return ret + + def _salt_retrieve_file(self, node, fname, dest): + """ + Execute cp.push on the remote host using SaltStack Master + """ + cmd = f"salt {node} cp.push {fname}" + res = sos_get_command_output(cmd) + if res['status'] == 0: + cachedir = f"/var/cache/salt/master/minions/{self.address}/files" + cachedir_file = os.path.join(cachedir, fname.lstrip('/')) + shutil.move(cachedir_file, dest) + return True + return False + + @property + def connected(self): + """Check if the remote host is responding using SaltStack Master.""" + up = self.run_command("echo Connected", timeout=10) + return up['status'] == 0 + + def _check_for_saltstack(self, password=None): + """Checks to see if the local system supports SaltStack Master. + + This check relies on feedback from the salt binary. The command being + run should always generate stderr output, but depending on what that + output reads we can determine if SaltStack Master is supported or not. + + For our purposes, a host that does not support SaltStack Master is not + able to run sos-collector. + + Returns + True if SaltStack Master is supported, else raises Exception + """ + + cmd = 'salt-run manage.status' + res = sos_get_command_output(cmd) + if res['status'] == 0: + return res['status'] == 0 + else: + raise SaltStackMasterUnsupportedException + + def _connect(self, password=None): + """Connect to the remote host using SaltStack Master. + + This method will attempt to connect to the remote host using SaltStack + Master. If the connection fails, an exception will be raised. + + If the connection is successful, the connection will be stored in the + self._connection attribute. + """ + if not is_executable('salt'): + self.log_error("salt command is not executable. 
") + return False + + try: + self._check_for_saltstack() + except ConnectionException: + self.log_error("Transport is not locally supported. ") + raise + self.log_info("Transport is locally supported and service running. ") + cmd = "echo Connected" + result = self.run_command(cmd, timeout=180) + return result['status'] == 0 + + def _disconnect(self): + return True + + @property + def remote_exec(self): + """The remote execution command to use for this transport.""" + salt_args = "--out json --static --no-color" + return f"salt {salt_args} {self.address} cmd.shell " + + def _retrieve_file(self, fname, dest): + """Retrieve a file from the remote host using saltstack + + Parameters + fname The path to the file on the remote host + dest The path to the destination directory on the master + + Returns + True if the file was retrieved, else False + """ + return ( + self._salt_retrieve_file(self.address, fname, dest) + if self.connected + else False + ) + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/component.py sosreport-4.5.4ubuntu0.20.04.1/sos/component.py --- sosreport-4.4/sos/component.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/component.py 2023-05-26 22:32:49.000000000 +0530 @@ -85,7 +85,7 @@ except Exception: pass - # update args from component's arg_defaults defintion + # update args from component's arg_defaults definition self._arg_defaults.update(self.arg_defaults) self.opts = self.load_options() # lgtm [py/init-calls-subclass] @@ -218,9 +218,21 @@ # set all values back to their normal default codict = cmdopts.dict(preset_filter=False) for opt, val in codict.items(): - if opt not in cmdopts.arg_defaults.keys(): + if opt not in cmdopts.arg_defaults.keys() or val in [None, [], '']: continue - if val not in [None, [], ''] and val != opts.arg_defaults[opt]: + # A plugin that is [enabled|disabled|only] in cmdopts must + # overwrite these three options of itself in opts - reset it first + if opt in ["enable_plugins", 
"skip_plugins", "only_plugins"]: + for oopt in ["enable_plugins", "skip_plugins", "only_plugins"]: + common = set(val) & set(getattr(opts, oopt)) + # common has all plugins that are in this combination of + # "[-e|-o|-n] plug" of cmdopts & "[-e|-o|-n] plug" of opts + # so remove those plugins from this [-e|-o|-n] opts + if common: + setattr(opts, oopt, [x for x in getattr(opts, oopt) + if x not in common]) + + if val != opts.arg_defaults[opt]: setattr(opts, opt, val) return opts @@ -300,7 +312,7 @@ self.opts.encrypt_pass = None self.soslog.info("User specified --encrypt, but chose no " "encryption when prompted.") - self.ui_log.warn("Archive will not be encrypted") + self.ui_log.warning("Archive will not be encrypted") else: self._set_encrypt_from_env_vars() @@ -331,6 +343,25 @@ self.archive.set_debug(self.opts.verbosity > 2) + def add_ui_log_to_stdout(self): + ui_console = logging.StreamHandler(sys.stdout) + ui_console.setFormatter(logging.Formatter('%(message)s')) + ui_console.setLevel( + logging.DEBUG if self.opts.verbosity > 1 else logging.INFO + ) + self.ui_log.addHandler(ui_console) + + def set_loggers_verbosity(self, verbosity): + if verbosity: + if self.flog: + self.flog.setLevel(logging.DEBUG) + if self.opts.verbosity > 1: + self.console.setLevel(logging.DEBUG) + else: + self.console.setLevel(logging.WARNING) + else: + self.console.setLevel(logging.WARNING) + def _setup_logging(self): """Creates the log handler that shall be used by all components and any and all related bits to those components that need to log either to the @@ -339,28 +370,20 @@ # main soslog self.soslog = logging.getLogger('sos') self.soslog.setLevel(logging.DEBUG) - flog = None + self.flog = None if not self.check_listing_options(): self.sos_log_file = self.get_temp_file() - flog = logging.StreamHandler(self.sos_log_file) - flog.setFormatter(logging.Formatter( + self.flog = logging.StreamHandler(self.sos_log_file) + self.flog.setFormatter(logging.Formatter( '%(asctime)s %(levelname)s: 
%(message)s')) - flog.setLevel(logging.INFO) - self.soslog.addHandler(flog) + self.flog.setLevel(logging.INFO) + self.soslog.addHandler(self.flog) if not self.opts.quiet: - console = logging.StreamHandler(sys.stdout) - console.setFormatter(logging.Formatter('%(message)s')) - if self.opts.verbosity: - if flog: - flog.setLevel(logging.DEBUG) - if self.opts.verbosity > 1: - console.setLevel(logging.DEBUG) - else: - console.setLevel(logging.WARNING) - else: - console.setLevel(logging.WARNING) - self.soslog.addHandler(console) + self.console = logging.StreamHandler(sys.stdout) + self.console.setFormatter(logging.Formatter('%(message)s')) + self.set_loggers_verbosity(self.opts.verbosity) + self.soslog.addHandler(self.console) # still log ERROR level message to console, but only setup this handler # when --quiet is used, as otherwise we'll double log else: @@ -371,7 +394,9 @@ # ui log self.ui_log = logging.getLogger('sos_ui') - self.ui_log.setLevel(logging.INFO) + self.ui_log.setLevel( + logging.DEBUG if self.opts.verbosity > 1 else logging.INFO + ) if not self.check_listing_options(): self.sos_ui_log_file = self.get_temp_file() ui_fhandler = logging.StreamHandler(self.sos_ui_log_file) @@ -381,10 +406,7 @@ self.ui_log.addHandler(ui_fhandler) if not self.opts.quiet: - ui_console = logging.StreamHandler(sys.stdout) - ui_console.setFormatter(logging.Formatter('%(message)s')) - ui_console.setLevel(logging.INFO) - self.ui_log.addHandler(ui_console) + self.add_ui_log_to_stdout() def get_temp_file(self): return self.tempfile_util.new() @@ -399,16 +421,32 @@ metadata """ + def __init__(self): + self._values = {} + + def __iter__(self): + for item in self._values.items(): + yield item[1] + + def __getitem__(self, item): + return self._values[item] + + def __getattr__(self, attr): + try: + return self._values[attr] + except Exception: + raise AttributeError(attr) + def add_field(self, field_name, content): """Add a key, value entry to the current metadata instance """ - 
setattr(self, field_name, content) + self._values[field_name] = content def add_section(self, section_name): """Adds a new instance of SoSMetadata to the current instance """ - setattr(self, section_name, SoSMetadata()) - return getattr(self, section_name) + self._values[section_name] = SoSMetadata() + return self._values[section_name] def add_list(self, list_name, content=[]): """Add a named list element to the current instance. If content is not @@ -416,7 +454,7 @@ """ if not isinstance(content, list): raise TypeError('content added must be list') - setattr(self, list_name, content) + self._values[list_name] = content def get_json(self, indent=None): """Convert contents of this SoSMetdata instance, and all other nested @@ -425,7 +463,7 @@ Used to write manifest.json to the final archives. """ return json.dumps(self, - default=lambda o: getattr(o, '__dict__', str(o)), + default=lambda o: getattr(o, '_values', str(o)), indent=indent) # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/__init__.py --- sosreport-4.4/sos/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -14,7 +14,7 @@ This module houses the i18n setup and message function. The default is to use gettext to internationalize messages. 
""" -__version__ = "4.4" +__version__ = "4.5.4" import os import sys diff -Nru sosreport-4.4/sos/plans/beakerlib.fmf sosreport-4.5.4ubuntu0.20.04.1/sos/plans/beakerlib.fmf --- sosreport-4.4/sos/plans/beakerlib.fmf 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/plans/beakerlib.fmf 1970-01-01 05:30:00.000000000 +0530 @@ -1,6 +0,0 @@ -summary: All upstreamed shell tests plan -description: - Execute BeakerLib tests from a different repository -discover+: - ref: main - filter: "tier: 0, 1, 2" \ No newline at end of file diff -Nru sosreport-4.4/sos/plans/main.fmf sosreport-4.5.4ubuntu0.20.04.1/sos/plans/main.fmf --- sosreport-4.4/sos/plans/main.fmf 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/plans/main.fmf 1970-01-01 05:30:00.000000000 +0530 @@ -1,13 +0,0 @@ -# Run on localhost by default -provision: - how: local - -discover: - how: fmf - url: https://github.com/sosreport/sos-tests.git - # ref: main # Please specify the ref in separate test plan - # filter: "tier: 0, 1, 2" # Please specify the filter in separate test plan - -# Use the internal executor -execute: - how: tmt \ No newline at end of file diff -Nru sosreport-4.4/sos/policies/distros/anolis.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/anolis.py --- sosreport-4.4/sos/policies/distros/anolis.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/anolis.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,46 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos.policies.distros.redhat import RedHatPolicy, OS_RELEASE +import os + + +class AnolisPolicy(RedHatPolicy): + + distro = "Anolis OS" + vendor = "The OpenAnolis Project" + vendor_urls = [('Distribution Website', 'https://openanolis.org/')] + + def __init__(self, sysroot=None, init=None, probe_runtime=True, + remote_exec=None): + super(AnolisPolicy, self).__init__(sysroot=sysroot, init=init, + probe_runtime=probe_runtime, + remote_exec=remote_exec) + + @classmethod + def check(cls, remote=''): + + if remote: + return cls.distro in remote + + # Return False if /etc/os-release is missing + if not os.path.exists(OS_RELEASE): + return False + + # Return False if /etc/anolis-release is missing + if not os.path.isfile('/etc/anolis-release'): + return False + + with open(OS_RELEASE, 'r') as f: + for line in f: + if line.startswith('NAME'): + if 'Anolis OS' in line: + return True + return False + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/policies/distros/azure.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/azure.py --- sosreport-4.4/sos/policies/distros/azure.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/azure.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,45 @@ +# Copyright (C) Eric Desrochers + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos.policies.distros.redhat import RedHatPolicy, OS_RELEASE +import os + + +class AzurePolicy(RedHatPolicy): + + distro = "Azure Linux" + vendor = "Microsoft" + vendor_urls = [ + ('Distribution Website', 'https://github.com/microsoft/CBL-Mariner') + ] + + def __init__(self, sysroot=None, init=None, probe_runtime=True, + remote_exec=None): + super(AzurePolicy, self).__init__(sysroot=sysroot, init=init, + probe_runtime=probe_runtime, + remote_exec=remote_exec) + + @classmethod + def check(cls, remote=''): + + if remote: + return cls.distro in remote + + if not os.path.exists(OS_RELEASE): + return False + + with open(OS_RELEASE, 'r') as f: + for line in f: + if line.startswith('NAME'): + if 'Common Base Linux Mariner' in line: + return True + return False + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/policies/distros/circle.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/circle.py --- sosreport-4.4/sos/policies/distros/circle.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/circle.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,49 @@ +# Copyright (C) Bella Zhang + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos.policies.distros.redhat import RedHatPolicy, OS_RELEASE +import os + + +class CirclePolicy(RedHatPolicy): + + distro = "Circle Linux" + vendor = "The Circle Linux Project" + vendor_urls = [('Distribution Website', 'https://cclinux.org')] + + def __init__(self, sysroot=None, init=None, probe_runtime=True, + remote_exec=None): + super(CirclePolicy, self).__init__(sysroot=sysroot, init=init, + probe_runtime=probe_runtime, + remote_exec=remote_exec) + + @classmethod + def check(cls, remote=''): + + if remote: + return cls.distro in remote + + # Return False if /etc/os-release is missing + if not os.path.exists(OS_RELEASE): + return False + + # Return False if /etc/circle-release is missing + if not os.path.isfile('/etc/circle-release'): + return False + + with open(OS_RELEASE, 'r') as f: + for line in f: + if line.startswith('NAME'): + if 'Circle Linux' in line: + return True + + return False + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/policies/distros/cos.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/cos.py --- sosreport-4.4/sos/policies/distros/cos.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/cos.py 2023-05-26 22:32:49.000000000 +0530 @@ -36,6 +36,13 @@ valid_subclasses = [CosPlugin, IndependentPlugin] PATH = "/usr/local/bin:/usr/local/sbin:/usr/bin:/usr/sbin:/bin:/sbin" + def __init__(self, sysroot=None, init=None, probe_runtime=True, + remote_exec=None): + super(CosPolicy, self).__init__(sysroot=sysroot, init=init, + probe_runtime=probe_runtime, + remote_exec=remote_exec) + self.valid_subclasses += [CosPolicy] + @classmethod def check(cls, remote=''): if remote: diff -Nru sosreport-4.4/sos/policies/distros/debian.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/debian.py --- sosreport-4.4/sos/policies/distros/debian.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/debian.py 2023-05-26 22:32:49.000000000 +0530 @@ -23,6 +23,18 @@ + 
":/usr/local/sbin:/usr/local/bin" sos_pkg_name = 'sosreport' + deb_versions = { + 'squeeze': 6, + 'wheezy': 7, + 'jessie': 8, + 'stretch': 9, + 'buster': 10, + 'bullseye': 11, + 'bookworm': 12, + 'trixie': 13, + 'forky': 14, + } + def __init__(self, sysroot=None, init=None, probe_runtime=True, remote_exec=None): super(DebianPolicy, self).__init__(sysroot=sysroot, init=init, @@ -50,12 +62,15 @@ def dist_version(self): try: - with open('/etc/lsb-release', 'r') as fp: - rel_string = fp.read() - if "wheezy/sid" in rel_string: - return 6 - elif "jessie/sid" in rel_string: - return 7 + with open('/etc/os-release', 'r') as fp: + rel_string = "" + lines = fp.readlines() + for line in lines: + if "VERSION_CODENAME" in line: + rel_string = line.split("=")[1].strip() + break + if rel_string in self.deb_versions: + return self.deb_versions[rel_string] return False except IOError: return False diff -Nru sosreport-4.4/sos/policies/distros/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/__init__.py --- sosreport-4.4/sos/policies/distros/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -21,7 +21,8 @@ from sos.policies.runtimes.podman import PodmanContainerRuntime from sos.policies.runtimes.docker import DockerContainerRuntime -from sos.utilities import shell_out, is_executable, bold +from sos.utilities import (shell_out, is_executable, bold, + sos_get_command_output) try: @@ -277,6 +278,9 @@ cmdline_opts = self.commons['cmdlineopts'] caseid = cmdline_opts.case_id if cmdline_opts.case_id else "" + if cmdline_opts.low_priority: + self._configure_low_priority() + # Set the cmdline settings to the class attrs that are referenced later # The policy default '_' prefixed versions of these are untouched to # allow fallback @@ -286,6 +290,7 @@ self.upload_password = cmdline_opts.upload_pass self.upload_archive_name = '' + # set or query for case id if not 
cmdline_opts.batch and not \ cmdline_opts.quiet: try: @@ -296,20 +301,57 @@ _("Optionally, please enter the case id that you are " "generating this report for [%s]: ") % caseid ) + except KeyboardInterrupt: + raise + if cmdline_opts.case_id: + self.case_id = cmdline_opts.case_id + + # set or query for upload credentials; this needs to be done after + # setting case id, as below methods might rely on detection of it + if not cmdline_opts.batch and not \ + cmdline_opts.quiet: + try: # Policies will need to handle the prompts for user information if cmdline_opts.upload and self.get_upload_url(): self.prompt_for_upload_user() self.prompt_for_upload_password() - self._print() + self.ui_log.info('') except KeyboardInterrupt: - self._print() raise - if cmdline_opts.case_id: - self.case_id = cmdline_opts.case_id - return + def _configure_low_priority(self): + """Used to constrain sos to a 'low priority' execution, potentially + letting individual policies set their own definition of what that is. + + By default, this will attempt to assign sos to an idle io class via + ionice if available. We will also renice our own pid to 19 in order to + not cause competition with other host processes for CPU time. + """ + _pid = os.getpid() + if is_executable('ionice'): + ret = sos_get_command_output( + f"ionice -c3 -p {_pid}", timeout=5 + ) + if ret['status'] == 0: + self.soslog.info('Set IO class to idle') + else: + msg = (f"Error setting IO class to idle: {ret['output']} " + f"(exit code {ret['status']})") + self.soslog.error(msg) + else: + self.ui_log.warning( + "Warning: unable to constrain report to idle IO class: " + "ionice is not available." 
+ ) + + try: + os.nice(20) + self.soslog.info('Set niceness of report to 19') + except Exception as err: + self.soslog.error(f"Error setting report niceness to 19: {err}") + def prompt_for_upload_user(self): """Should be overridden by policies to determine if a user needs to be provided or not @@ -379,7 +421,9 @@ raise Exception("No upload destination provided by policy or by " "--upload-url") upload_func = self._determine_upload_type() - print(_("Attempting upload to %s" % self.get_upload_url_string())) + self.ui_log.info( + _(f"Attempting upload to {self.get_upload_url_string()}") + ) return upload_func() def _determine_upload_type(self): diff -Nru sosreport-4.4/sos/policies/distros/opencloudos.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/opencloudos.py --- sosreport-4.4/sos/policies/distros/opencloudos.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/opencloudos.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,42 @@ +# Copyright (c) 2022 Tencent., ZoeDong +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos.policies.distros.redhat import RedHatPolicy, OS_RELEASE +import os + + +class OpenCloudOSPolicy(RedHatPolicy): + distro = "OpenCloudOS Stream" + vendor = "OpenCloudOS" + vendor_urls = [('Distribution Website', 'https://www.opencloudos.org/')] + + def __init__(self, sysroot=None, init=None, probe_runtime=True, + remote_exec=None): + super(OpenCloudOSPolicy, self).__init__(sysroot=sysroot, init=init, + probe_runtime=probe_runtime, + remote_exec=remote_exec) + + @classmethod + def check(cls, remote=''): + + if remote: + return cls.distro in remote + + if not os.path.exists(OS_RELEASE): + return False + + with open(OS_RELEASE, 'r') as f: + for line in f: + if line.startswith('NAME'): + if 'OpenCloudOS Stream' in line: + return True + + return False + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/policies/distros/redhat.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/redhat.py --- sosreport-4.4/sos/policies/distros/redhat.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/redhat.py 2023-05-26 22:32:49.000000000 +0530 @@ -38,7 +38,7 @@ vendor = "Red Hat" vendor_urls = [ ('Distribution Website', 'https://www.redhat.com/'), - ('Commercial Support', 'https://www.access.redhat.com/') + ('Commercial Support', 'https://access.redhat.com/') ] _tmp_dir = "/var/tmp" _in_container = False @@ -266,11 +266,19 @@ if self.commons['cmdlineopts'].upload_url: super(RHELPolicy, self).prompt_for_upload_user() return - if self.case_id and not self.get_upload_user(): - self.upload_user = input(_( - "Enter your Red Hat Customer Portal username for uploading [" - "empty for anonymous SFTP]: ") - ) + if not self.get_upload_user(): + if self.case_id: + self.upload_user = input(_( + "Enter your Red Hat Customer Portal username for " + "uploading [empty for anonymous SFTP]: ") + ) + else: # no case id provided => failover to SFTP + self.upload_url = RH_SFTP_HOST + self.ui_log.info("No case id provided, uploading to SFTP") + 
self.upload_user = input(_( + "Enter your Red Hat Customer Portal username for " + "uploading to SFTP [empty for anonymous]: ") + ) def get_upload_url(self): if self.upload_url: @@ -330,8 +338,14 @@ _user = self.get_upload_user() _token = json.loads(ret.text)['token'] else: - print("Unable to retrieve Red Hat auth token using provided " - "credentials. Will try anonymous.") + self.ui_log.debug( + f"DEBUG: auth attempt failed (status: {ret.status_code}): " + f"{ret.json()}" + ) + self.ui_log.error( + "Unable to retrieve Red Hat auth token using provided " + "credentials. Will try anonymous." + ) # we either do not have a username or password/token, or both if not _token: adata = {"isAnonymous": True} @@ -340,12 +354,16 @@ resp = json.loads(anon.text) _user = resp['username'] _token = resp['token'] - print( - "User '%s'" # lgtm [py/clear-text-logging-sensitive-data] - "used for anonymous upload. Please inform your support " - "engineer so they may retrieve the data." - % _user + self.ui_log.info( + _(f"User {_user} used for anonymous upload. Please inform " + f"your support engineer so they may retrieve the data.") + ) + else: + self.ui_log.debug( + f"DEBUG: anonymous request failed (status: " + f"{anon.status_code}): {anon.json()}" ) + if _user and _token: return super(RHELPolicy, self).upload_sftp(user=_user, password=_token) @@ -365,8 +383,10 @@ if not self.upload_url.startswith(RH_API_HOST): raise else: - print("Upload to Red Hat Customer Portal failed. Trying %s" - % RH_SFTP_HOST) + self.ui_log.error( + _(f"Upload to Red Hat Customer Portal failed. 
Trying " + f"{RH_SFTP_HOST}") + ) self.upload_url = RH_SFTP_HOST uploaded = super(RHELPolicy, self).upload_archive(archive) return uploaded diff -Nru sosreport-4.4/sos/policies/distros/suse.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/suse.py --- sosreport-4.4/sos/policies/distros/suse.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/suse.py 2023-05-26 22:32:49.000000000 +0530 @@ -34,7 +34,7 @@ # If rpm query timed out after timeout duration exit if not self.package_manager.packages: - print("Could not obtain installed package list", file=sys.stderr) + self.ui_log.error("Could not obtain installed package list.") sys.exit(1) self.PATH = "/usr/sbin:/usr/bin:/root/bin:/sbin" diff -Nru sosreport-4.4/sos/policies/distros/ubuntu.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/ubuntu.py --- sosreport-4.4/sos/policies/distros/ubuntu.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/distros/ubuntu.py 2023-05-26 22:32:49.000000000 +0530 @@ -9,6 +9,10 @@ from sos.report.plugins import UbuntuPlugin from sos.policies.distros.debian import DebianPolicy +from sos.policies.package_managers.snap import SnapPackageManager +from sos.policies.package_managers.dpkg import DpkgPackageManager +from sos.policies.package_managers import MultiPackageManager + import os @@ -31,6 +35,17 @@ super(UbuntuPolicy, self).__init__(sysroot=sysroot, init=init, probe_runtime=probe_runtime, remote_exec=remote_exec) + + self.package_manager = MultiPackageManager( + primary=DpkgPackageManager, + fallbacks=[SnapPackageManager], + chroot=self.sysroot, + remote_exec=remote_exec) + + if self.package_manager.pkg_by_name( + 'sosreport')['pkg_manager'] == 'snap': + self.sos_bin_path = '/snap/bin' + self.valid_subclasses += [UbuntuPlugin] @classmethod @@ -78,6 +93,6 @@ return self._upload_url fname = os.path.basename(self.upload_archive_name) return self._upload_url + fname - super(UbuntuPolicy, 
self).get_upload_url() + return super(UbuntuPolicy, self).get_upload_url() # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/policies/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/__init__.py --- sosreport-4.4/sos/policies/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -1,3 +1,4 @@ +import logging import os import platform import time @@ -68,6 +69,11 @@ :param probe_runtime: Should the Policy try to load a ContainerRuntime :type probe_runtime: ``bool`` + :param remote_exec: If this policy is loaded for a remote node, use + this to facilitate executing commands via the + SoSTransport in use + :type remote_exec: ``SoSTranport.run_command()`` + :cvar distro: The name of the distribution the Policy represents :vartype distro: ``str`` @@ -122,6 +128,8 @@ super(SubClass, self).__init__(). Policies that require runtime tests to construct PATH must call self.set_exec_path() after modifying PATH in their own initializer.""" + self.soslog = logging.getLogger('sos') + self.ui_log = logging.getLogger('sos_ui') self._parse_uname() self.case_id = None self.probe_runtime = probe_runtime @@ -424,44 +432,41 @@ file for this run :type map_file: ``str`` """ - # Logging is already shutdown and all terminal output must use the - # print() call. + # Logging is shut down, but there are some edge cases where automation + # does not capture printed output (e.g. avocado CI). Use the ui_log to + # still print to console in this case. 
# make sure a report exists if not archive and not directory: return False - self._print() - if map_file: - self._print(_("A mapping of obfuscated elements is available at" - "\n\t%s\n" % map_file)) + self.ui_log.info( + _(f"\nA mapping of obfuscated elements is available at" + f"\n\t{map_file}") + ) if archive: - self._print(_("Your sosreport has been generated and saved " - "in:\n\t%s\n") % archive, always=True) - self._print(_(" Size\t%s") % - get_human_readable(archivestat.st_size)) - self._print(_(" Owner\t%s") % - getpwuid(archivestat.st_uid).pw_name) + self.ui_log.info( + _(f"\nYour sosreport has been generated and saved in:" + f"\n\t{archive}\n") + ) + self.ui_log.info( + _(f" Size\t{get_human_readable(archivestat.st_size)}") + ) + self.ui_log.info( + _(f" Owner\t{getpwuid(archivestat.st_uid).pw_name}") + ) else: - self._print(_("Your sosreport build tree has been generated " - "in:\n\t%s\n") % directory, always=True) + self.ui_log.info( + _(f"Your sosreport build tree has been generated in:" + f"\n\t{directory}\n") + ) if checksum: - self._print(" " + self.get_preferred_hash_name() + "\t" + checksum) - self._print() - self._print(_("Please send this file to your support " - "representative.")) - self._print() - - def _print(self, msg=None, always=False): - """A wrapper around print that only prints if we are not running in - quiet mode""" - if always or not self.commons['cmdlineopts'].quiet: - if msg: - print(msg) - else: - print() + self.ui_log.info(f" {self.get_preferred_hash_name()}\t{checksum}") + self.ui_log.info( + _("\nPlease send this file to your support representative.\n") + ) def get_msg(self): """This method is used to prepare the preamble text to display to diff -Nru sosreport-4.4/sos/policies/package_managers/dpkg.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/package_managers/dpkg.py --- sosreport-4.4/sos/policies/package_managers/dpkg.py 2022-08-16 01:37:50.000000000 +0530 +++ 
sosreport-4.5.4ubuntu0.20.04.1/sos/policies/package_managers/dpkg.py 2023-05-26 22:32:49.000000000 +0530 @@ -15,9 +15,18 @@ """Subclass for dpkg-based distrubitons """ - query_command = "dpkg-query -W -f='${Package}|${Version}\\n'" + query_command = "dpkg-query -W -f='${Package}|${Version}|${Status}\\n'" + query_path_command = "dpkg -S" verify_command = "dpkg --verify" verify_filter = "" + def _parse_pkg_list(self, pkg_list): + for pkg in pkg_list.splitlines(): + if '|' not in pkg: + continue + name, version, status = pkg.split('|') + if 'deinstall' in status: + continue + yield (name, version, None) # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/policies/package_managers/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/package_managers/__init__.py --- sosreport-4.4/sos/policies/package_managers/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/package_managers/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -11,8 +11,7 @@ import re import fnmatch -from sos.utilities import shell_out -from pipes import quote +from sos.utilities import sos_get_command_output class PackageManager(): @@ -42,35 +41,22 @@ :vartype chroot: ``bool`` :cvar remote_exec: If package manager is on a remote system (e.g. 
for - sos collect), prepend this SSH command to run remotely - :vartype remote_exec: ``str`` or ``None`` + sos collect), use this to execute commands + :vartype remote_exec: ``SoSTransport.run_command()`` """ query_command = None verify_command = None verify_filter = None files_command = None + query_path_command = None chroot = None files = None - def __init__(self, chroot=None, query_command=None, - verify_command=None, verify_filter=None, - files_command=None, remote_exec=None): + def __init__(self, chroot=None, remote_exec=None): self._packages = {} self.files = [] - - self.query_command = query_command or self.query_command - self.verify_command = verify_command or self.verify_command - self.verify_filter = verify_filter or self.verify_filter - self.files_command = files_command or self.files_command - - # if needed, append the remote command to these so that this returns - # the remote package details, not local - if remote_exec: - for cmd in ['query_command', 'verify_command', 'files_command']: - if getattr(self, cmd) is not None: - _cmd = getattr(self, cmd) - setattr(self, cmd, "%s %s" % (remote_exec, quote(_cmd))) + self.remote_exec = remote_exec if chroot: self.chroot = chroot @@ -81,6 +67,51 @@ self._generate_pkg_list() return self._packages + @property + def manager_name(self): + return self.__class__.__name__.lower().split('package')[0] + + def exec_cmd(self, command, timeout=30, need_root=False, env=None, + get_pty=False, chroot=None): + """ + Runs a package manager command, either via sos_get_command_output() if + local, or via a SoSTransport's run_command() if this needs to be run + remotely, as in the case of remote nodes for use during `sos collect`. + + :param command: The command to execute + :type command: ``str`` + + :param timeout: Timeout for command to run, in seconds + :type timeout: ``int`` + + :param need_root: Does the command require root privileges? 
+ :type need_root: ``bool`` + + :param env: Environment variables to set + :type env: ``dict`` with keys being env vars to define + + :param get_pty: If running remotely, does the command require + obtaining a pty? + :type get_pty: ``bool`` + + :param chroot: If necessary, chroot command execution to here + :type chroot: ``None`` or ``str`` + + :returns: The output of the command + :rtype: ``str`` + """ + if self.remote_exec: + ret = self.remote_exec(command, timeout, need_root, env, get_pty) + else: + ret = sos_get_command_output(command, timeout, chroot=chroot, + env=env) + if ret['status'] == 0: + return ret['output'] + # In the case of package managers, we don't want to potentially iterate + # over stderr, so prevent the package methods from doing anything at + # all by returning nothing. + return '' + def all_pkgs_by_name(self, name): """ Get a list of packages that match name. @@ -123,34 +154,44 @@ except Exception: return None + def _parse_pkg_list(self, pkg_list): + """ + Using the output of `query_command`, build the _packages dict. + + This should be overridden by distinct package managers and be a + generator for _generate_pkg_list which will insert the packages into + the _packages dict. + + This method should yield a tuple of name, version, release for each + package parsed. If the package manager or distribution does not use a + release field, set it to None. 
+ + :param pkg_list: The output of the result of `query_command` + :type pkg_list: ``str`` + """ + raise NotImplementedError + def _generate_pkg_list(self): """Generates a dictionary of packages for internal use by the package manager in the format:: {'package_name': {'name': 'package_name', - 'version': 'major.minor.version'}} + 'version': 'major.minor.version', + 'release': 'package release' or None, + 'pkg_manager': 'package manager name'}} """ if self.query_command: cmd = self.query_command - pkg_list = shell_out( - cmd, timeout=0, chroot=self.chroot - ).splitlines() + pkg_list = self.exec_cmd(cmd, timeout=30, chroot=self.chroot) - for pkg in pkg_list: - if '|' not in pkg: - continue - elif pkg.count("|") == 1: - name, version = pkg.split("|") - release = None - elif pkg.count("|") == 2: - name, version, release = pkg.split("|") - self._packages[name] = { - 'name': name, - 'version': version.split(".") + for pkg in self._parse_pkg_list(pkg_list): + self._packages[pkg[0]] = { + 'name': pkg[0], + 'version': pkg[1].split('.'), + 'release': pkg[2], + 'pkg_manager': self.manager_name } - release = release if release else None - self._packages[name]['release'] = release def pkg_version(self, pkg): """Returns the entry in self.packages for pkg if it exists @@ -188,10 +229,28 @@ """ if self.files_command and not self.files: cmd = self.files_command - files = shell_out(cmd, timeout=0, chroot=self.chroot) + files = self.exec_cmd(cmd, timeout=180, chroot=self.chroot) self.files = files.splitlines() return self.files + def pkg_by_path(self, path): + """Given a path, return the package that owns that path. 
+ + :param path: The filepath to check for package ownership + :type path: ``str`` + + :returns: The package name or 'unknown' + :rtype: ``str`` + """ + if not self.query_path_command: + return 'unknown' + try: + cmd = f"{self.query_path_command} {path}" + pkg = self.exec_cmd(cmd, timeout=5, chroot=self.chroot) + return pkg.splitlines() or 'unknown' + except Exception: + return 'unknown' + def build_verify_command(self, packages): """build_verify_command(self, packages) -> str Generate a command to verify the list of packages given @@ -234,4 +293,108 @@ return self.verify_command + " " + verify_packages +class MultiPackageManager(PackageManager): + """ + This class is used to leverage multiple individual package managers as a + single entity on systems that support multiple concurrent package managers. + + Policies that use this approach will need to specify a primary package + manager, and at least one fallback manager. When queries are sent to this + manager, the primary child manager is checked first. If there is a valid, + not None, response (e.g. a given package is installed) then that response + is used. However, if the response is empty or None, the fallback managers + are then queried in the order they were passed to MultiPackageManager + during initialization. 
+ + :param primary: The primary package manager to rely on + :type primary: A subclass of `PackageManager` + + :param fallbacks: A list of package managers to use if the primary does not + provide a response + :type fallbacks: ``list`` of `PackageManager` subclasses + """ + + def __init__(self, primary, fallbacks, chroot=None, remote_exec=None): + super(MultiPackageManager, self).__init__(chroot=chroot, + remote_exec=remote_exec) + + if not issubclass(primary, PackageManager): + raise Exception( + f"Primary package manager must be PackageManager subclass, not" + f" {primary.__class__}" + ) + + if not isinstance(fallbacks, list): + raise Exception('Fallbacks must be specified in a list') + + for pm in fallbacks: + if not issubclass(pm, PackageManager): + raise Exception( + f"Fallback package managers must be PackageManager " + f"subclass, not {pm.__class__}" + ) + + self.primary = primary(chroot=chroot, remote_exec=remote_exec) + self.fallbacks = [ + pm(chroot=chroot, remote_exec=remote_exec) for pm in fallbacks + ] + + if not self.fallbacks: + raise Exception( + 'Must define at least one fallback package manager' + ) + + self._managers = [self.primary] + self._managers.extend(self.fallbacks) + + def all_files(self): + if not self.files: + for pm in self._managers: + self.files.extend(pm.all_files()) + return self.files + + def _generate_pkg_list(self): + self._packages.update(self.primary.packages) + for pm in self.fallbacks: + _pkgs = pm.packages + for pkg in _pkgs.keys(): + if pkg not in self._packages: + self._packages[pkg] = _pkgs[pkg] + + def _pm_wrapper(self, method): + """ + This wrapper method is used to provide implicit iteration through the + primary and any defined fallback managers that are set for a given + instance of MultiPackageManager. + + Important note: we pass the _name_ of the method to run here as a + string, and not any actual method as we rely on iteratively looking up + the actual method in each package manager. 
+ + :param method: The name of the method we're wrapping for the purpose of + iterating through defined package managers + :type method: ``str`` + """ + def pkg_func(*args, **kwargs): + ret = None + for pm in self._managers: + if not ret or ret == 'unknown': + _wrapped_func = getattr(pm, method) + ret = _wrapped_func(*args, **kwargs) + return ret + return pkg_func + + def __getattribute__(self, item): + # if the attr is callable, then we need to iterate over our child + # managers until we get a response, unless it is _generate_pkg_list in + # which case we only want to use the one actually defined here, or + # _pm_wrapper, which we need to avoid this override for to not hit + # recursion hell. + if item in ['_generate_pkg_list', '_pm_wrapper', 'all_files']: + return super().__getattribute__(item) + attr = super().__getattribute__(item) + if hasattr(attr, '__call__'): + return self._pm_wrapper(item) + return attr + # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/policies/package_managers/rpm.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/package_managers/rpm.py --- sosreport-4.4/sos/policies/package_managers/rpm.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/policies/package_managers/rpm.py 2023-05-26 22:32:49.000000000 +0530 @@ -16,9 +16,16 @@ """ query_command = 'rpm -qa --queryformat "%{NAME}|%{VERSION}|%{RELEASE}\\n"' + query_path_command = 'rpm -qf' files_command = 'rpm -qal' verify_command = 'rpm -V' verify_filter = ["debuginfo", "-devel"] + def _parse_pkg_list(self, pkg_list): + for pkg in pkg_list.splitlines(): + if '|' not in pkg: + continue + name, version, release = pkg.split('|') + yield (name, version, release) # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/policies/package_managers/snap.py sosreport-4.5.4ubuntu0.20.04.1/sos/policies/package_managers/snap.py --- sosreport-4.4/sos/policies/package_managers/snap.py 1970-01-01 05:30:00.000000000 +0530 +++ 
sosreport-4.5.4ubuntu0.20.04.1/sos/policies/package_managers/snap.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,33 @@ +# Copyright 2023 Canonical Ltd. Arif Ali + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos.policies.package_managers import PackageManager + + +class SnapPackageManager(PackageManager): + """Subclass for snap-based distributions + """ + + query_command = "snap list" + query_path_command = "" + verify_command = "" + verify_filter = "" + + def _parse_pkg_list(self, pkg_list): + for line in pkg_list.splitlines(): + if line == "": + continue + pkg = line.split() + if pkg[0] == "Name" or pkg[0] == "Connection": + continue + name, version = pkg[0], pkg[1] + yield (name, version, None) + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/presets/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/presets/__init__.py --- sosreport-4.4/sos/presets/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/presets/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -106,10 +106,24 @@ NO_PRESET_DESC = 'Do not load a preset' NO_PRESET_NOTE = 'Use to disable automatically loaded presets' +SMALL_PRESET = 'minimal' +SMALL_PRESET_DESC = ('Small and quick report that reduces sos report resource ' + 'consumption') +SMALL_PRESET_NOTE = ('May be useful for low-resource systems, but may not ' + 'provide sufficient data for analysis') + +SMALL_PRESET_OPTS = SoSOptions(log_size=10, journal_size=10, plugin_timeout=30, + command_timeout=30, low_priority=True) + GENERIC_PRESETS = { - NO_PRESET: PresetDefaults(name=NO_PRESET, desc=NO_PRESET_DESC, - note=NO_PRESET_NOTE, opts=SoSOptions()) + NO_PRESET: PresetDefaults( + name=NO_PRESET, 
desc=NO_PRESET_DESC, note=NO_PRESET_NOTE, + opts=SoSOptions() + ), + SMALL_PRESET: PresetDefaults( + name=SMALL_PRESET, desc=SMALL_PRESET_DESC, note=SMALL_PRESET_NOTE, + opts=SMALL_PRESET_OPTS + ) } - # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/presets/redhat/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/presets/redhat/__init__.py --- sosreport-4.4/sos/presets/redhat/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/presets/redhat/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -37,8 +37,8 @@ RHOCP = "ocp" RHOCP_DESC = "OpenShift Container Platform by Red Hat" RHOCP_OPTS = SoSOptions( - verify=True, skip_plugins=['cgroups'], container_runtime='crio', - no_report=True, log_size=100, + skip_plugins=['cgroups'], container_runtime='crio', no_report=True, + log_size=100, plugopts=[ 'crio.timeout=600', 'networking.timeout=600', diff -Nru sosreport-4.4/sos/report/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/__init__.py --- sosreport-4.4/sos/report/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -92,6 +92,7 @@ 'estimate_only': False, 'experimental': False, 'enable_plugins': [], + 'journal_size': 100, 'keywords': [], 'keyword_file': None, 'plugopts': [], @@ -100,6 +101,7 @@ 'list_presets': False, 'list_profiles': False, 'log_size': 25, + 'low_priority': False, 'map_file': '/etc/sos/cleaner/default_mapping', 'skip_commands': [], 'skip_files': [], @@ -168,10 +170,7 @@ self.opts = self.apply_options_from_cmdline(self.opts) if hasattr(self.preset.opts, 'verbosity') and \ self.preset.opts.verbosity > 0: - print('\nWARNING: It is not recommended to set verbosity via the ' - 'preset as it might have\nunforseen consequences for your ' - 'report logs.\n') - self._setup_logging() + self.set_loggers_verbosity(self.preset.opts.verbosity) self._set_directories() @@ -241,6 +240,10 @@ report_grp.add_argument("-e", "--enable-plugins", 
action="extend", dest="enable_plugins", type=str, help="enable these plugins", default=[]) + report_grp.add_argument("--journal-size", type=int, default=100, + dest="journal_size", + help="limit the size of collected journals " + "in MiB") report_grp.add_argument("-k", "--plugin-option", "--plugopts", action="extend", dest="plugopts", type=str, @@ -262,7 +265,11 @@ report_grp.add_argument("--log-size", action="store", dest="log_size", type=int, default=25, help="limit the size of collected logs " - "(in MiB)") + "(not journals) in MiB") + report_grp.add_argument("--low-priority", action="store_true", + default=False, + help="generate report with low system priority" + ) report_grp.add_argument("--namespaces", default=None, help="limit number of namespaces to collect " "output for - 0 means unlimited") @@ -460,7 +467,7 @@ elif not self.policy.runtimes: msg = ("WARNING: No container runtimes are active, ignoring " "option to set default runtime to '%s'\n" % crun) - self.soslog.warn(msg) + self.soslog.warning(msg) elif crun not in self.policy.runtimes.keys(): valid = ', '.join(p for p in self.policy.runtimes.keys() if p != 'default') @@ -552,7 +559,7 @@ _devs.update(self._get_eth_devs()) _devs['bridge'] = self._get_bridge_devs() except Exception as err: - self.soslog.warn("Could not enumerate network devices: %s" % err) + self.soslog.warning(f"Could not enumerate network devices: {err}") return _devs def _get_network_namespace_devices(self): @@ -618,7 +625,7 @@ if _nseth not in filt_devs: _eth_devs.append(_nseth) except Exception as err: - self.soslog.warn( + self.soslog.warning( "Could not determine network namespace '%s' devices: %s" % (namespace, err) ) @@ -639,7 +646,7 @@ try: _bout = sos_get_command_output('brctl show', timeout=15) except Exception as err: - self.soslog.warn("Unable to enumerate bridge devices: %s" % err) + self.soslog.warning("Unable to enumerate bridge devices: %s" % err) if _bout['status'] == 0: for _bline in _bout['output'].splitlines()[1:]: 
try: @@ -1297,16 +1304,21 @@ pool.shutdown(wait=True) pool._threads.clear() if self.opts.estimate_only: - from pathlib import Path - tmpdir_path = Path(self.archive.get_tmp_dir()) - self.estimated_plugsizes[plugin[1]] = sum( - [f.lstat().st_size for f in tmpdir_path.glob('**/*')]) + # call "du -s -B1" for the tmp dir to get the disk usage of the + # data collected by the plugin - if the command fails, count with 0 + tmpdir = self.archive.get_tmp_dir() + try: + du = sos_get_command_output('du -sB1 %s' % tmpdir) + self.estimated_plugsizes[plugin[1]] = \ + int(du['output'].split()[0]) + except Exception: + self.estimated_plugsizes[plugin[1]] = 0 # remove whole tmp_dir content - including "sos_commands" and # similar dirs that will be re-created on demand by next plugin # if needed; it is less error-prone approach than skipping # deletion of some dirs but deleting their content - for f in os.listdir(self.archive.get_tmp_dir()): - f = os.path.join(self.archive.get_tmp_dir(), f) + for f in os.listdir(tmpdir): + f = os.path.join(tmpdir, f) if os.path.isdir(f) and not os.path.islink(f): rmtree(f) else: @@ -1328,7 +1340,7 @@ ) self.ui_progress(status_line) try: - plug.collect() + plug.collect_plugin() # certain exceptions can cause either of these lists to no # longer contain the plugin, which will result in sos hanging # so we can't blindly call remove() on these two. @@ -1489,6 +1501,8 @@ directory = None # report directory path (--build) map_file = None # path of the map file generated for the report + self.generate_manifest_tag_summary() + # use this instead of self.opts.clean beyond the initial check if # cleaning was requested in case SoSCleaner fails for some reason do_clean = False @@ -1524,6 +1538,12 @@ short_name='manifest.json' ) + # Now, just (optionally) pack the report and print work outcome; let + # print ui_log to stdout also in quiet mode. 
For non-quiet mode we + # already added the handler + if self.opts.quiet: + self.add_ui_log_to_stdout() + # print results in estimate mode (to include also just added manifest) if self.opts.estimate_only: from sos.utilities import get_human_readable @@ -1579,15 +1599,16 @@ os.umask(old_umask) else: if self.opts.encrypt_pass or self.opts.encrypt_key: - self.ui_log.warn("\nUnable to encrypt when using --build. " - "Encryption is only available for archives.") + self.ui_log.warning("\nUnable to encrypt when using --build. " + "Encryption is only available for " + "archives.") # move the archive root out of the private tmp directory. directory = self.archive.get_archive_path() dir_name = os.path.basename(directory) + if do_clean: + dir_name = cleaner.obfuscate_string(dir_name) try: final_dir = os.path.join(self.sys_tmp, dir_name) - if do_clean: - final_dir = cleaner.obfuscate_string(final_dir) os.rename(directory, final_dir) directory = final_dir except (OSError, IOError): @@ -1617,12 +1638,12 @@ # output filename is in the private tmpdir - move it to the # containing directory. 
- final_name = os.path.join(self.sys_tmp, - os.path.basename(archive)) + base_archive = os.path.basename(archive) if do_clean: - final_name = cleaner.obfuscate_string( - final_name.replace('.tar', '-obfuscated.tar') + base_archive = cleaner.obfuscate_string( + base_archive.replace('.tar', '-obfuscated.tar') ) + final_name = os.path.join(self.sys_tmp, base_archive) # Get stat on the archive archivestat = os.stat(archive) @@ -1654,9 +1675,8 @@ except (OSError, IOError): print(_("Error moving checksum file: %s" % archive_hash)) - if not self.opts.build: - self.policy.display_results(archive, directory, checksum, - archivestat, map_file=map_file) + self.policy.display_results(archive, directory, checksum, + archivestat, map_file=map_file) else: self.policy.display_results(archive, directory, checksum, map_file=map_file) @@ -1696,6 +1716,16 @@ self.report_md.add_field('preset', self.preset.name if self.preset else 'unset') self.report_md.add_list('profiles', self.opts.profiles) + + _io_class = 'unknown' + if is_executable('ionice'): + _io = sos_get_command_output(f"ionice -p {os.getpid()}") + if _io['status'] == 0: + _io_class = _io['output'].split()[0].strip(':') + self.report_md.add_section('priority') + self.report_md.priority.add_field('io_class', _io_class) + self.report_md.priority.add_field('niceness', os.nice(0)) + self.report_md.add_section('devices') for key, value in self.devices.items(): self.report_md.devices.add_field(key, value) @@ -1703,6 +1733,38 @@ self.report_md.add_list('disabled_plugins', self.opts.skip_plugins) self.report_md.add_section('plugins') + def generate_manifest_tag_summary(self): + """Add a section to the manifest that contains a dict summarizing the + tags that were created and assigned during this report's creation. + + This summary dict can be used for easier inspection of tagged items by + inspection/analyzer projects such as Red Hat Insights. The format of + this dict is `{tag_name: [file_list]}`. 
+ """ + def compile_tags(ent, key='filepath'): + for tag in ent['tags']: + if not ent[key] or not tag: + continue + try: + path = tag_summary[tag] + except KeyError: + path = [] + path.extend( + ent[key] if isinstance(ent[key], list) else [ent[key]] + ) + tag_summary[tag] = sorted(list(set(path))) + + tag_summary = {} + for plug in self.report_md.plugins: + for cmd in plug.commands: + compile_tags(cmd) + for _file in plug.files: + compile_tags(_file, 'files_copied') + for collection in plug.collections: + compile_tags(collection) + self.report_md.add_field('tag_summary', + dict(sorted(tag_summary.items()))) + def _merge_preset_options(self): # Log command line options msg = "[%s:%s] executing 'sos %s'" @@ -1745,9 +1807,9 @@ if not self.verify_plugins(): return False - self.add_manifest_data() self.batch() self.prework() + self.add_manifest_data() self.setup() self.collect() if not self.opts.no_env_vars: diff -Nru sosreport-4.4/sos/report/plugins/abrt.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/abrt.py --- sosreport-4.4/sos/report/plugins/abrt.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/abrt.py 2023-05-26 22:32:49.000000000 +0530 @@ -27,7 +27,7 @@ def setup(self): self.add_cmd_output("abrt-cli status", - tags=["abrt_status", "insights_abrt_status_bare"]) + tags=["abrt_status", "abrt_status_bare"]) abrt_list = self.collect_cmd_output("abrt-cli list") if self.get_option("detailed") and abrt_list['status'] == 0: for line in abrt_list["output"].splitlines(): diff -Nru sosreport-4.4/sos/report/plugins/alternatives.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/alternatives.py --- sosreport-4.4/sos/report/plugins/alternatives.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/alternatives.py 2023-05-26 22:32:49.000000000 +0530 @@ -8,23 +8,17 @@ # # See the LICENSE file in the source distribution for further information. 
-from sos.report.plugins import Plugin, RedHatPlugin +from sos.report.plugins import Plugin, RedHatPlugin, UbuntuPlugin -class Alternatives(Plugin, RedHatPlugin): +class Alternatives(Plugin): short_desc = 'System alternatives' plugin_name = 'alternatives' - packages = ('chkconfig',) - commands = ('alternatives',) def setup(self): - self.add_cmd_tags({ - "alternatives --display java.*": 'insights_display_java' - }) - - self.add_cmd_output('alternatives --version') + self.add_cmd_output('%s --version' % self.alternatives_cmd) alts = [] ignore = [ @@ -35,13 +29,55 @@ 'xinputrc' ] - res = self.collect_cmd_output('alternatives --list') + res = self.collect_cmd_output(self.alternatives_list) if res['status'] == 0: for line in res['output'].splitlines(): alt = line.split()[0] if alt not in ignore: alts.append(alt) - disp_cmd = "alternatives --display %s" + disp_cmd = "%s --display %s" % (self.alternatives_cmd, "%s") self.add_cmd_output([disp_cmd % alt for alt in alts]) + +class RedHatAlternatives(Alternatives, RedHatPlugin): + + packages = ('alternatives',) + commands = ('alternatives',) + + alternatives_cmd = 'alternatives' + alternatives_list = '%s --list' % alternatives_cmd + + def setup(self): + + super(RedHatAlternatives, self).setup() + + self.add_cmd_tags({ + "alternatives --display java.*": 'display_java', + "alternatives --display python.*": + 'alternatives_display_python' + }) + + +class UbuntuAlternatives(Alternatives, UbuntuPlugin): + + packages = ('dpkg',) + commands = ('update-alternatives',) + + alternatives_cmd = 'update-alternatives' + alternatives_list = '%s --get-selections' % alternatives_cmd + + def setup(self): + + super(UbuntuAlternatives, self).setup() + + if self.get_option("all_logs"): + self.add_copy_spec([ + "/var/log/alternatives.log*", + ]) + else: + self.add_copy_spec([ + "/var/log/alternatives.log", + "/var/log/alternatives.log.1", + ]) + # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/apache.py 
sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/apache.py --- sosreport-4.4/sos/report/plugins/apache.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/apache.py 2023-05-26 22:32:49.000000000 +0530 @@ -38,10 +38,10 @@ def setup(self): # collect list of installed modules and verify config syntax. self.add_cmd_output([ - "apachectl -M", "apachectl -S", "apachectl -t" ], cmd_as_tag=True) + self.add_cmd_output("apachectl -M", tags="httpd_M") # Other plugins collect these files; # do not collect them here to avoid collisions in the archive paths. @@ -50,9 +50,12 @@ 'ceilometer', 'cinder', 'foreman', + 'gnocchi', 'horizon', 'keystone', + 'manila', 'nova', + 'octavia', 'placement', 'pulp' ] @@ -81,10 +84,10 @@ def setup(self): self.add_file_tags({ - ".*/access_log": 'httpd_access_log', - ".*/error_log": 'httpd_error_log', - ".*/ssl_access_log": 'httpd_ssl_access_log', - ".*/ssl_error_log": 'httpd_ssl_error_log' + "/var/log/httpd/access_log": 'httpd_access_log', + "/var/log/httpd/error_log": 'httpd_error_log', + "/var/log/httpd/ssl_access_log": 'httpd_ssl_access_log', + "/var/log/httpd/ssl_error_log": 'httpd_ssl_error_log' }) super(RedHatApache, self).setup() @@ -131,7 +134,7 @@ class DebianApache(Apache, DebianPlugin, UbuntuPlugin): files = ('/etc/apache2/apache2.conf',) - apachepkg = 'apache' + apachepkg = 'apache2' def setup(self): super(DebianApache, self).setup() @@ -144,10 +147,15 @@ # collect only the current log set by default self.add_copy_spec([ - "/var/log/apache2/access_log", - "/var/log/apache2/error_log", + "/var/log/apache2/access.log", + "/var/log/apache2/error.log", + "/var/log/apache2/ssl_access.log", + "/var/log/apache2/ssl_error.log", + "/var/log/apache2/other_vhosts_access.log", ]) if self.get_option("log") or self.get_option("all_logs"): - self.add_copy_spec("/var/log/apache2/*") + self.add_copy_spec([ + "/var/log/apache2", + ]) # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/apport.py 
sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/apport.py --- sosreport-4.4/sos/report/plugins/apport.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/apport.py 2023-05-26 22:32:49.000000000 +0530 @@ -26,6 +26,7 @@ ]) else: self.add_copy_spec("/var/log/apport*") + self.add_copy_spec("/var/crash/**") self.add_copy_spec("/etc/apport/*") self.add_copy_spec("/var/lib/whoopsie/whoopsie-id") self.add_cmd_output( diff -Nru sosreport-4.4/sos/report/plugins/apt.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/apt.py --- sosreport-4.4/sos/report/plugins/apt.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/apt.py 2023-05-26 22:32:49.000000000 +0530 @@ -45,4 +45,18 @@ suggest_filename="apt-cache_policy_details" ) + def postproc(self): + super(Apt, self).postproc() + self.do_file_sub( + "/etc/apt/sources.list", + r"(deb\shttp(s)?://)\S+:\S+(@.*)", + r"\1******:******\3" + ) + self.do_path_regex_sub( + "/etc/apt/sources.list.d/", + r"(deb\shttp(s)?://)\S+:\S+(@.*)", + r"\1******:******\3" + ) + + # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/auditd.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/auditd.py --- sosreport-4.4/sos/report/plugins/auditd.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/auditd.py 2023-05-26 22:32:49.000000000 +0530 @@ -27,11 +27,12 @@ "/etc/audit/plugins.d/", "/etc/audisp/", ]) - self.add_cmd_output([ - "ausearch --input-logs -m avc,user_avc,fanotify -ts today", - "auditctl -s", - "auditctl -l" - ]) + + self.add_cmd_output( + "ausearch --input-logs -m avc,user_avc,fanotify -ts today" + ) + self.add_cmd_output("auditctl -l", tags="auditctl_rules") + self.add_cmd_output("auditctl -s", tags="auditctl_status") config_file = "/etc/audit/auditd.conf" log_file = "/var/log/audit/audit.log" diff -Nru sosreport-4.4/sos/report/plugins/autofs.py 
sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/autofs.py --- sosreport-4.4/sos/report/plugins/autofs.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/autofs.py 2023-05-26 22:32:49.000000000 +0530 @@ -43,6 +43,7 @@ def setup(self): self.add_copy_spec("/etc/auto*") + self.add_file_tags({"/etc/autofs.conf": "autofs_conf"}) self.add_service_status("autofs") self.add_cmd_output("automount -m") if self.checkdebug(): @@ -54,6 +55,25 @@ r"(password=)[^,\s]*", r"\1********" ) + # Hide secrets in the LDAP authentication config + # + # Example of scrubbing of the secret: + # + # secret="abc" + # or + # encoded_secret = 'abc' + # + # to: + # + # secret="********" + # or + # encoded_secret = '********' + # + self.do_file_sub( + "/etc/autofs_ldap_auth.conf", + r"(secret[\s]*[=]+[\s]*)(\'|\").*(\'|\")", + r"\1\2********\3" + ) self.do_cmd_output_sub( "automount -m", r"(password=)[^,\s]*", diff -Nru sosreport-4.4/sos/report/plugins/azure.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/azure.py --- sosreport-4.4/sos/report/plugins/azure.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/azure.py 2023-05-26 22:32:49.000000000 +0530 @@ -41,9 +41,9 @@ self.add_copy_spec(self.path_join(path, name), sizelimit=limit) self.add_cmd_output(( - 'curl -s -H Metadata:true ' + 'curl -s -H Metadata:true --noproxy "*" ' '"http://169.254.169.254/metadata/instance/compute?' 
- 'api-version=2019-11-01"' + 'api-version=2021-01-01&format=json"' ), suggest_filename='instance_metadata.json') diff -Nru sosreport-4.4/sos/report/plugins/block.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/block.py --- sosreport-4.4/sos/report/plugins/block.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/block.py 2023-05-26 22:32:49.000000000 +0530 @@ -25,15 +25,15 @@ '/sys/block/.*/queue/scheduler': 'scheduler' }) + self.add_cmd_output("blkid -c /dev/null", tags="blkid") + self.add_cmd_output("ls -lanR /dev", tags="ls_dev") + self.add_cmd_output("lsblk", tags="lsblk") + self.add_cmd_output("lsblk -O -P", tags="lsblk_pairs") self.add_cmd_output([ - "lsblk", "lsblk -t", "lsblk -D", - "blkid -c /dev/null", "blockdev --report", - "ls -lanR /dev", "ls -lanR /sys/block", - "lsblk -O -P", "losetup -a", ]) @@ -52,11 +52,12 @@ cmds = [ "parted -s %(dev)s unit s print", - "fdisk -l %(dev)s", "udevadm info %(dev)s", "udevadm info -a %(dev)s" ] self.add_device_cmd(cmds, devices='block', blacklist='ram.*') + self.add_device_cmd("fdisk -l %(dev)s", blacklist="ram.*", + devices="block", tags="fdisk_l_sos") lsblk = self.collect_cmd_output("lsblk -f -a -l") # for LUKS devices, collect cryptsetup luksDump diff -Nru sosreport-4.4/sos/report/plugins/boot.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/boot.py --- sosreport-4.4/sos/report/plugins/boot.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/boot.py 2023-05-26 22:32:49.000000000 +0530 @@ -33,23 +33,26 @@ "/boot/yaboot.conf" ]) - self.add_cmd_output("ls -lanR /boot", tags="insights_ls_boot") - - self.add_cmd_output([ - "lsinitrd", - "ls -lanR /sys/firmware", - ]) + self.add_cmd_output("ls -lanR /boot", tags="ls_boot") + self.add_cmd_output("ls -lanR /sys/firmware", + tags="ls_sys_firmware") + self.add_cmd_output("lsinitrd", tags="lsinitrd") + self.add_cmd_output("mokutil --sb-state", + tags="mokutil_sbstate") 
self.add_cmd_output([ "efibootmgr -v", - "mokutil --sb-state" + "ls -l /initrd.img /boot/initrd.img", + "lsinitramfs -l /initrd.img", + "lsinitramfs -l /boot/initrd.img" ]) if self.get_option("all-images"): - for image in glob('/boot/initr*.img'): + for image in glob('/boot/initr*.img*'): if image[-9:] == "kdump.img": continue self.add_cmd_output("lsinitrd %s" % image, priority=100) + self.add_cmd_output("lsinitramfs -l %s" % image, priority=100) # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/candlepin.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/candlepin.py --- sosreport-4.4/sos/report/plugins/candlepin.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/candlepin.py 2023-05-26 22:32:49.000000000 +0530 @@ -128,7 +128,7 @@ self.do_file_sub("/var/log/candlepin/cpdb.log", cpdbreg, repl) for key in ["trustStorePassword", "keyStorePassword"]: self.do_file_sub("/etc/candlepin/broker.xml", - r"%s=(\w*)([;<])" % key, - r"%s=********\2" % key) + r"(%s)=(\w*)([;<])" % key, + r"\1=********\3") # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/ceph_common.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_common.py --- sosreport-4.4/sos/report/plugins/ceph_common.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_common.py 2023-05-26 22:32:49.000000000 +0530 @@ -17,7 +17,7 @@ plugin_name = 'ceph_common' profiles = ('storage', 'virt', 'container') - containers = ('ceph-(mon|rgw|osd).*',) + containers = ('ceph-(.*-)?(mon|rgw|osd).*',) ceph_hostname = gethostname() packages = ( @@ -48,7 +48,7 @@ self.add_file_tags({ '.*/ceph.conf': 'ceph_conf', - '/var/log/ceph/ceph.log.*': 'ceph_log', + '/var/log/ceph(.*)?/ceph.log.*': 'ceph_log', }) if not all_logs: @@ -57,8 +57,8 @@ self.add_copy_spec("/var/log/calamari",) self.add_copy_spec([ - "/var/log/ceph/ceph.log", - "/var/log/ceph/ceph.audit.log*", + "/var/log/ceph/**/ceph.log", + 
"/var/log/ceph/**/ceph.audit.log*", "/var/log/calamari/*.log", "/etc/ceph/", "/etc/calamari/", diff -Nru sosreport-4.4/sos/report/plugins/ceph_iscsi.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_iscsi.py --- sosreport-4.4/sos/report/plugins/ceph_iscsi.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_iscsi.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,37 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos.report.plugins import Plugin, RedHatPlugin, UbuntuPlugin + + +class CephISCSI(Plugin, RedHatPlugin, UbuntuPlugin): + + short_desc = "CEPH iSCSI" + + plugin_name = "ceph_iscsi" + profiles = ("storage", "virt", "container") + packages = ("ceph-iscsi",) + services = ("rbd-target-api", "rbd-target-gw") + containers = ("rbd-target-api.*", "rbd-target-gw.*") + + def setup(self): + self.add_copy_spec([ + "/etc/tcmu/tcmu.conf", + "/var/log/**/ceph-client.*.log", + "/var/log/**/rbd-target-api.log", + "/var/log/**/rbd-target-gw.log", + "/var/log/**/tcmu-runner.log", + "/var/log/tcmu-runner.log" + ]) + + self.add_cmd_output([ + "gwcli info", + "gwcli ls" + ]) + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/ceph_mds.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_mds.py --- sosreport-4.4/sos/report/plugins/ceph_mds.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_mds.py 2023-05-26 22:32:49.000000000 +0530 @@ -7,17 +7,14 @@ # See the LICENSE file in the source distribution for further information. 
from sos.report.plugins import Plugin, RedHatPlugin, UbuntuPlugin -import glob class CephMDS(Plugin, RedHatPlugin, UbuntuPlugin): short_desc = 'CEPH mds' plugin_name = 'ceph_mds' profiles = ('storage', 'virt', 'container') - containers = ('ceph-fs.*',) - - def check_enabled(self): - return True if glob.glob('/var/lib/ceph/mds/*/*') else False + containers = ('ceph-(.*-)?fs.*',) + files = ('/var/lib/ceph/mds/',) def setup(self): self.add_file_tags({ diff -Nru sosreport-4.4/sos/report/plugins/ceph_mgr.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_mgr.py --- sosreport-4.4/sos/report/plugins/ceph_mgr.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_mgr.py 2023-05-26 22:32:49.000000000 +0530 @@ -6,47 +6,85 @@ # # See the LICENSE file in the source distribution for further information. +import os + from sos.report.plugins import Plugin, RedHatPlugin, UbuntuPlugin -import glob class CephMGR(Plugin, RedHatPlugin, UbuntuPlugin): + """ + This plugin is for capturing information from Ceph mgr nodes. While the + majority of this plugin should be version-agnostic, several collections are + dependent upon the version of Ceph installed. Versions that correlate to + RHCS 4 or RHCS 5 are explicitly handled for differences such as those + pertaining to log locations on the host filesystem. + + Note that while this plugin will activate based on the presence of Ceph + containers, commands are run directly on the host as those containers are + often not configured to successfully run the `ceph` commands collected by + this plugin. These commands are majorily `ceph daemon` commands that will + reference discovered admin sockets under /var/run/ceph. + + Users may expect to see several collections twice - once in standard output + from the `ceph` command, and again in JSON format. The latter of which will + be placed in the `json_output/` subdirectory within this plugin's directory + in the report archive. 
These JSON formatted collections are intended to + aid in automated analysis. + """ short_desc = 'CEPH mgr' plugin_name = 'ceph_mgr' profiles = ('storage', 'virt', 'container') - - containers = ('ceph-mgr.*',) - - def check_enabled(self): - return True if glob.glob('/var/lib/ceph/mgr/*/*') else False + files = ('/var/lib/ceph/mgr/', '/var/lib/ceph/*/mgr*') + containers = ('ceph-(.*-)?mgr.*',) def setup(self): + self.add_file_tags({ - '/var/log/ceph/ceph-mgr.*.log': 'ceph_mgr_log', + '/var/log/ceph/(.*/)?ceph-mgr.*.log': 'ceph_mgr_log', }) + self.add_forbidden_path([ + "/etc/ceph/*keyring*", + "/var/lib/ceph/**/*keyring*", + "/var/lib/ceph/**/osd*", + "/var/lib/ceph/**/mon*", + # Excludes temporary ceph-osd mount location like + # /var/lib/ceph/tmp/mnt.XXXX from sos collection. + "/var/lib/ceph/**/tmp/*mnt*", + "/etc/ceph/*bindpass*", + ]) + self.add_copy_spec([ - "/var/log/ceph/ceph-mgr*.log", - "/var/lib/ceph/mgr/", - "/var/lib/ceph/bootstrap-mgr/", - "/run/ceph/ceph-mgr*", + "/var/log/ceph/**/ceph-mgr*.log", + "/var/lib/ceph/**/mgr*", + "/var/lib/ceph/**/bootstrap-mgr/", + "/run/ceph/**/ceph-mgr*", ]) # more commands to be added later - self.add_cmd_output([ - "ceph balancer status", - "ceph orch host ls", - "ceph orch device ls", - "ceph orch ls --export", - "ceph orch ps", - "ceph orch status --detail", - "ceph orch upgrade status", - "ceph log last cephadm" + ceph_mgr_cmds = ([ + "balancer status", + "orch host ls", + "orch device ls", + "orch ls", + "orch ls --export", + "orch ps", + "orch status --detail", + "orch upgrade status", + "log last cephadm" ]) - ceph_cmds = [ + self.add_cmd_output( + [f"ceph {cmd}" for cmd in ceph_mgr_cmds]) + # get ceph_cmds again as json for easier automation parsing + self.add_cmd_output( + [f"ceph {cmd} --format json-pretty" for cmd in ceph_mgr_cmds], + subdir="json_output", + ) + + cmds = [ "config diff", "config show", "dump_cache", @@ -65,44 +103,20 @@ "version" ] - self.add_forbidden_path([ - "/etc/ceph/*keyring*", - 
"/var/lib/ceph/*keyring*", - "/var/lib/ceph/*/*keyring*", - "/var/lib/ceph/*/*/*keyring*", - "/var/lib/ceph/osd", - "/var/lib/ceph/mon", - # Excludes temporary ceph-osd mount location like - # /var/lib/ceph/tmp/mnt.XXXX from sos collection. - "/var/lib/ceph/tmp/*mnt*", - "/etc/ceph/*bindpass*", - ]) - - mgr_ids = [] - # Get the ceph user processes - out = self.exec_cmd('ps -u ceph -o args') - - if out['status'] == 0: - # Extract the OSD ids from valid output lines - for procs in out['output'].splitlines(): - proc = procs.split() - # Locate the '--id' value - if proc and proc[0].endswith("ceph-mgr"): - try: - id_index = proc.index("--id") - mgr_ids.append("mgr.%s" % proc[id_index+1]) - except (IndexError, ValueError): - self.log_warn("could not find ceph-mgr id: %s", procs) - - # If containerized, run commands in containers - try: - cname = self.get_all_containers_by_regex("ceph-mgr*")[0][1] - except Exception: - cname = None - self.add_cmd_output([ - "ceph daemon %s %s" - % (mgrid, cmd) for mgrid in mgr_ids for cmd in ceph_cmds - ], container=cname) + f"ceph daemon {m} {cmd}" for m in self.get_socks() for cmd in cmds] + ) + + def get_socks(self): + """ + Find any available admin sockets under /var/run/ceph (or subdirs for + later versions of Ceph) which can be used for ceph daemon commands + """ + ceph_sockets = [] + for rdir, dirs, files in os.walk('/var/run/ceph/'): + for file in files: + if file.startswith('ceph-mgr') and file.endswith('.asok'): + ceph_sockets.append(self.path_join(rdir, file)) + return ceph_sockets # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/ceph_mon.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_mon.py --- sosreport-4.4/sos/report/plugins/ceph_mon.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_mon.py 2023-05-26 22:32:49.000000000 +0530 @@ -6,46 +6,77 @@ # # See the LICENSE file in the source distribution for further information. 
+import re + from sos.report.plugins import Plugin, RedHatPlugin, UbuntuPlugin -import glob class CephMON(Plugin, RedHatPlugin, UbuntuPlugin): + """ + This plugin serves to collect information on monitor nodes within a Ceph + cluster. It is designed to collect from several versions of Ceph, including + those versions that serve as the basis for RHCS 4 and RHCS 5. + + Older versions of Ceph will have collections from locations such as + /var/log/ceph, whereas newer versions (as of this plugin's latest update) + will have collections from /var/log/ceph//. This plugin attempts to + account for this where possible across the host's filesystem. + + Users may expect to see several collections twice - once in standard output + from the `ceph` command, and again in JSON format. The latter of which will + be placed in the `json_output/` subdirectory within this plugin's directory + in the report archive. These JSON formatted collections are intended to + aid in automated analysis. + """ short_desc = 'CEPH mon' plugin_name = 'ceph_mon' profiles = ('storage', 'virt', 'container') - containers = ('ceph-mon.*',) - - def check_enabled(self): - return True if glob.glob('/var/lib/ceph/mon/*/*') else False + # note: for RHCS 5 / Ceph v16 the containers serve as an enablement trigger + # but by default they are not capable of running various ceph commands in + # this plugin - the `ceph` binary is functional directly on the host + containers = ('ceph-(.*-)?mon.*',) + files = ('/var/lib/ceph/mon/', '/var/lib/ceph/*/mon*') + ceph_version = 0 def setup(self): + + self.ceph_version = self.get_ceph_version() + self.add_file_tags({ '.*/ceph.conf': 'ceph_conf', - '/var/log/ceph/ceph-mon.*.log': 'ceph_mon_log' + "/var/log/ceph/(.*/)?ceph-.*mon.*.log": 'ceph_mon_log' }) + self.add_forbidden_path([ + "/etc/ceph/*keyring*", + "/var/lib/ceph/**/*keyring*", + # Excludes temporary ceph-osd mount location like + # /var/lib/ceph/tmp/mnt.XXXX from sos collection. 
+ "/var/lib/ceph/**/tmp/*mnt*", + "/etc/ceph/*bindpass*" + ]) + self.add_copy_spec([ - "/run/ceph/ceph-mon*", - "/var/lib/ceph/mon/*/kv_backend", - "/var/log/ceph/ceph-mon*.log" + "/run/ceph/**/ceph-mon*", + "/var/lib/ceph/**/kv_backend", + "/var/log/ceph/**/*ceph-mon*.log" ]) + self.add_cmd_output("ceph report", tags="ceph_report") self.add_cmd_output([ # The ceph_mon plugin will collect all the "ceph ..." commands # which typically require the keyring. "ceph mon stat", "ceph quorum_status", - "ceph report", "ceph-disk list", "ceph versions", "ceph features", "ceph insights", "ceph crash stat", - "ceph crash ls", + "ceph config dump", "ceph config log", "ceph config generate-minimal-conf", "ceph config-key dump", @@ -58,13 +89,21 @@ "ceph mgr metadata", "ceph mgr module ls", "ceph mgr services", - "ceph mgr versions" + "ceph mgr versions", + "ceph log last 10000 debug cluster", + "ceph log last 10000 debug audit" ]) + crashes = self.collect_cmd_output('ceph crash ls') + if crashes['status'] == 0: + for crashln in crashes['output'].splitlines(): + if crashln.endswith('*'): + cid = crashln.split()[0] + self.add_cmd_output(f"ceph crash info {cid}") + ceph_cmds = [ "mon dump", "status", - "health detail", "device ls", "df", "df detail", @@ -73,7 +112,6 @@ "pg dump", "pg stat", "time-sync-status", - "osd tree", "osd stat", "osd df tree", "osd dump", @@ -86,63 +124,102 @@ "osd numa-status" ] - self.add_cmd_output([ - "ceph %s --format json-pretty" % s for s in ceph_cmds - ], subdir="json_output", tags="insights_ceph_health_detail") - - mon_ids = [] - # Get the ceph user processes - out = self.exec_cmd('ps -u ceph -o args') - - if out['status'] == 0: - # Extract the mon ids - for procs in out['output'].splitlines(): - proc = procs.split() - # Locate the '--id' value of ceph-mon - if proc and proc[0].endswith("ceph-mon"): - try: - id_index = proc.index("--id") - mon_ids.append(proc[id_index+1]) - except (IndexError, ValueError): - self.log_warn("could not find ceph-mon 
id: %s", procs) - - self.add_cmd_output([ - "ceph tell mon.%s mon_status" % mon_id for mon_id in mon_ids - ], subdir="json_output", tags="insights_ceph_health_detail") - - self.add_forbidden_path([ - "/etc/ceph/*keyring*", - "/var/lib/ceph/*keyring*", - "/var/lib/ceph/*/*keyring*", - "/var/lib/ceph/*/*/*keyring*", - # Excludes temporary ceph-osd mount location like - # /var/lib/ceph/tmp/mnt.XXXX from sos collection. - "/var/lib/ceph/tmp/*mnt*", - "/etc/ceph/*bindpass*" - ]) + self.add_cmd_output("ceph health detail --format json-pretty", + subdir="json_output", + tags="ceph_health_detail") + self.add_cmd_output("ceph osd tree --format json-pretty", + subdir="json_output", + tags="ceph_osd_tree") + self.add_cmd_output( + [f"ceph tell mon.{mid} mon_status" for mid in self.get_ceph_ids()], + subdir="json_output", + ) - # If containerized, run commands in containers - try: - cname = self.get_all_containers_by_regex("ceph-mon*")[0][1] - except Exception: - cname = None + self.add_cmd_output([f"ceph {cmd}" for cmd in ceph_cmds]) + # get ceph_cmds again as json for easier automation parsing self.add_cmd_output( - ["ceph %s" % cmd for cmd in ceph_cmds], - container=cname + [f"ceph {cmd} --format json-pretty" for cmd in ceph_cmds], + subdir="json_output", + ) + + def get_ceph_version(self): + ver = self.exec_cmd('ceph --version') + if ver['status'] == 0: + try: + _ver = ver['output'].split()[2] + return int(_ver.split('.')[0]) + except Exception as err: + self._log_debug(f"Could not determine ceph version: {err}") + self._log_error( + 'Failed to find ceph version, command collection will be limited' ) + return 0 + + def get_ceph_ids(self): + ceph_ids = [] + # ceph version 14 correlates to RHCS 4 + if self.ceph_version == 14 or self.ceph_version == 15: + # Get the ceph user processes + out = self.exec_cmd('ps -u ceph -o args') + + if out['status'] == 0: + # Extract the mon ids + for procs in out['output'].splitlines(): + proc = procs.split() + # Locate the '--id' value of 
the process + if proc and proc[0].endswith("ceph-mon"): + try: + id_index = proc.index("--id") + ceph_ids.append(proc[id_index + 1]) + except (IndexError, ValueError): + self._log_warn('Unable to find ceph IDs') + + # ceph version 16 is RHCS 5 + elif self.ceph_version >= 16: + stats = self.exec_cmd('ceph status') + if stats['status'] == 0: + try: + ret = re.search(r'(\s*mon: .* quorum) (.*) (\(.*\))', + stats['output']) + ceph_ids.extend(ret.groups()[1].split(',')) + except Exception as err: + self._log_debug(f"id determination failed: {err}") + return ceph_ids def postproc(self): - keys = [ - 'API_PASSWORD', - 'API_USER.*', - 'API_.*_KEY', - 'key', - '_secret', - 'rbd/mirror/peer/.*' - ] - creg = r"((\".*(%s)\":) \")(.*)(\".*)" % "|".join(keys) - self.do_cmd_output_sub('ceph config-key dump', creg, r'\1*******\5') + if self.ceph_version >= 16: + keys = [ + 'key', + 'username', + 'password', + '_secret', + 'rbd/mirror/peer/.*' + ] + # we need to do this iteratively, as config-key dump here contains + # nested json data written as strings, which may have multiple hits + # within the same line + for key in keys: + creg = fr'(((.*)({key}\\\": ))((\\\"(.*?)\\\")(.*)))' + self.do_cmd_output_sub( + 'ceph config-key dump', creg, r'\2\"******\"\8' + ) + else: + keys = [ + 'API_PASSWORD', + 'API_USER.*', + 'API_.*_KEY', + 'key', + '_secret', + 'rbd/mirror/peer/.*' + ] + + creg = fr"((\".*({'|'.join(keys)})\":) \")(.*)(\".*)" + self.do_cmd_output_sub( + 'ceph config-key dump', creg, r'\1*******\5' + ) + + self.do_cmd_private_sub('ceph config-key dump') # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/ceph_osd.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_osd.py --- sosreport-4.4/sos/report/plugins/ceph_osd.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_osd.py 2023-05-26 22:32:49.000000000 +0530 @@ -6,32 +6,54 @@ # # See the LICENSE file in the source distribution for further information. 
+import os + from sos.report.plugins import Plugin, RedHatPlugin, UbuntuPlugin -import glob class CephOSD(Plugin, RedHatPlugin, UbuntuPlugin): + """ + This plugin is for capturing information from Ceph OSD nodes. While the + majority of this plugin should be version agnotics, several collections are + dependent upon the version of Ceph installed. Versions that correlate to + RHCS 4 or RHCS 5 are explicitly handled for differences such as those + pertaining to log locations on the host filesystem. + + Note that while this plugin will activate based on the presence of Ceph + containers, commands are run directly on the host as those containers are + often not configured to successfully run the `ceph` commands collected by + this plugin. These commands are majorily `ceph daemon` commands that will + reference discovered admin sockets under /var/run/ceph. + """ short_desc = 'CEPH osd' plugin_name = 'ceph_osd' profiles = ('storage', 'virt', 'container') - containers = ('ceph-osd.*',) - - def check_enabled(self): - return True if glob.glob('/var/lib/ceph/osd/*/*') else False + containers = ('ceph-(.*-)?osd.*',) + files = ('/var/lib/ceph/osd/', '/var/lib/ceph/*/osd*') def setup(self): + self.add_file_tags({ - '/var/log/ceph/ceph-osd.*.log': 'ceph_osd_log', + "/var/log/ceph/(.*/)?ceph-(.*-)?osd.*.log": 'ceph_osd_log', }) + self.add_forbidden_path([ + "/etc/ceph/*keyring*", + "/var/lib/ceph/**/*keyring*", + # Excludes temporary ceph-osd mount location like + # /var/lib/ceph/tmp/mnt.XXXX from sos collection. 
+ "/var/lib/ceph/**/tmp/*mnt*", + "/etc/ceph/*bindpass*" + ]) + # Only collect OSD specific files self.add_copy_spec([ - "/run/ceph/ceph-osd*", - "/var/lib/ceph/osd/*/kv_backend", - "/var/log/ceph/ceph-osd*.log", - "/var/log/ceph/ceph-volume*.log", + "/run/ceph/**/ceph-osd*", + "/var/lib/ceph/**/kv_backend", + "/var/log/ceph/**/ceph-osd*.log", + "/var/log/ceph/**/ceph-volume*.log", ]) self.add_cmd_output([ @@ -39,7 +61,7 @@ "ceph-volume lvm list" ]) - ceph_cmds = [ + cmds = [ "bluestore bluefs available", "config diff", "config show", @@ -62,41 +84,20 @@ "version", ] - osd_ids = [] - # Get the ceph user processes - out = self.exec_cmd('ps -u ceph -o args') - - if out['status'] == 0: - # Extract the OSD ids from valid output lines - for procs in out['output'].splitlines(): - proc = procs.split() - # Locate the '--id' value - if proc and proc[0].endswith("ceph-osd"): - try: - id_index = proc.index("--id") - osd_ids.append("osd.%s" % proc[id_index+1]) - except (IndexError, ValueError): - self.log_warn("could not find ceph-osd id: %s", procs) - - try: - cname = self.get_all_containers_by_regex("ceph-osd*")[0][1] - except Exception: - cname = None - self.add_cmd_output( - ["ceph daemon %s %s" % (i, c) for i in osd_ids for c in ceph_cmds], - container=cname + [f"ceph daemon {i} {c}" for i in self.get_socks() for c in cmds] ) - self.add_forbidden_path([ - "/etc/ceph/*keyring*", - "/var/lib/ceph/*keyring*", - "/var/lib/ceph/*/*keyring*", - "/var/lib/ceph/*/*/*keyring*", - # Excludes temporary ceph-osd mount location like - # /var/lib/ceph/tmp/mnt.XXXX from sos collection. 
- "/var/lib/ceph/tmp/*mnt*", - "/etc/ceph/*bindpass*" - ]) + def get_socks(self): + """ + Find any available admin sockets under /var/run/ceph (or subdirs for + later versions of Ceph) which can be used for ceph daemon commands + """ + ceph_sockets = [] + for rdir, dirs, files in os.walk('/var/run/ceph/'): + for file in files: + if file.endswith('.asok'): + ceph_sockets.append(self.path_join(rdir, file)) + return ceph_sockets # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/ceph_rgw.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_rgw.py --- sosreport-4.4/sos/report/plugins/ceph_rgw.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ceph_rgw.py 2023-05-26 22:32:49.000000000 +0530 @@ -7,7 +7,6 @@ # See the LICENSE file in the source distribution for further information. from sos.report.plugins import Plugin, RedHatPlugin, UbuntuPlugin -import glob class CephRGW(Plugin, RedHatPlugin, UbuntuPlugin): @@ -16,10 +15,8 @@ plugin_name = 'ceph_rgw' profiles = ('storage', 'virt', 'container', 'webserver') - containers = ('ceph-rgw.*',) - - def check_enabled(self): - return True if glob.glob('/var/lib/ceph/radosgw/*/*') else False + containers = ('ceph-(.*)?rgw.*',) + files = ('/var/lib/ceph/radosgw',) def setup(self): self.add_copy_spec('/var/log/ceph/ceph-client.rgw*.log', diff -Nru sosreport-4.4/sos/report/plugins/cgroups.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/cgroups.py --- sosreport-4.4/sos/report/plugins/cgroups.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/cgroups.py 2023-05-26 22:32:49.000000000 +0530 @@ -21,7 +21,7 @@ def setup(self): self.add_file_tags({ - '/proc/1/cgroups': 'insights_init_process_cgroup' + '/proc/1/cgroups': 'init_process_cgroup' }) self.add_copy_spec([ diff -Nru sosreport-4.4/sos/report/plugins/chrony.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/chrony.py --- sosreport-4.4/sos/report/plugins/chrony.py 
2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/chrony.py 2023-05-26 22:32:49.000000000 +0530 @@ -22,12 +22,12 @@ self.add_cmd_output([ "chronyc activity", "chronyc tracking", - "chronyc -n sources", "chronyc sourcestats", "chronyc serverstats", "chronyc ntpdata", "chronyc -n clients" ]) + self.add_cmd_output("chronyc -n sources", tags="chronyc_sources") class RedHatChrony(Chrony, RedHatPlugin): diff -Nru sosreport-4.4/sos/report/plugins/cloud_init.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/cloud_init.py --- sosreport-4.4/sos/report/plugins/cloud_init.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/cloud_init.py 2023-05-26 22:32:49.000000000 +0530 @@ -33,4 +33,8 @@ '/var/log/cloud-init*' ]) + self.add_file_tags({ + "/etc/cloud/cloud.cfg": "cloud_cfg_filtered" + }) + # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/cman.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/cman.py --- sosreport-4.4/sos/report/plugins/cman.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/cman.py 2023-05-26 22:32:49.000000000 +0530 @@ -53,7 +53,7 @@ self.do_file_sub( cluster_conf, r"(\s*\ + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos.report.plugins import Plugin, RedHatPlugin + + +class Fapolicyd(Plugin, RedHatPlugin): + + """ + This plugin collects configuration and some probes of Fapolicyd software + framework. 
+ """ + short_desc = 'Fapolicyd framework' + + plugin_name = "fapolicyd" + packages = ("fapolicyd", ) + + def setup(self): + self.add_copy_spec([ + "/etc/fapolicyd/fapolicyd.conf", + "/etc/fapolicyd/compiled.rules", + "/etc/fapolicyd/fapolicyd.trust", + "/etc/fapolicyd/rules.d/", + "/etc/fapolicyd/trust.d/", + "/var/log/fapolicyd-access.log", + ]) + + self.add_cmd_output([ + "fapolicyd-cli --list", + "fapolicyd-cli --check-config", + "fapolicyd-cli --check-trustdb", + ]) + + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/filesys.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/filesys.py --- sosreport-4.4/sos/report/plugins/filesys.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/filesys.py 2023-05-26 22:32:49.000000000 +0530 @@ -46,9 +46,10 @@ "/etc/mtab", "/etc/fstab" ]) - self.add_cmd_output("mount -l", root_symlink="mount") + self.add_cmd_output("mount -l", root_symlink="mount", + tags="mount") self.add_cmd_output("df -al -x autofs", root_symlink="df", - tags='insights_df__al') + tags='df__al') self.add_cmd_output([ "df -ali -x autofs", "findmnt", @@ -67,7 +68,8 @@ mounts = '/proc/mounts' ext_fs_regex = r"^(/dev/\S+).+ext[234]\s+" for dev in self.do_regex_find_all(ext_fs_regex, mounts): - self.add_cmd_output("dumpe2fs %s %s" % (dumpe2fs_opts, dev)) + self.add_cmd_output("dumpe2fs %s %s" % (dumpe2fs_opts, dev), + tags="dumpe2fs_h") if self.get_option('frag'): self.add_cmd_output("e2freefrag %s" % (dev), priority=100) diff -Nru sosreport-4.4/sos/report/plugins/firewalld.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/firewalld.py --- sosreport-4.4/sos/report/plugins/firewalld.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/firewalld.py 2023-05-26 22:32:49.000000000 +0530 @@ -31,6 +31,9 @@ "/etc/firewalld/icmptypes/*.xml", "/etc/firewalld/services/*.xml", "/etc/firewalld/zones/*.xml", + "/etc/firewalld/ipsets/*.xml", + 
"/etc/firewalld/policies/*.xml", + "/etc/firewalld/helpers/*.xml", "/etc/sysconfig/firewalld", "/var/log/firewalld", ]) @@ -38,16 +41,16 @@ # use a 10s timeout to workaround dbus problems in # docker containers. self.add_cmd_output([ - "firewall-cmd --list-all-zones", "firewall-cmd --direct --get-all-chains", "firewall-cmd --direct --get-all-rules", "firewall-cmd --direct --get-all-passthroughs", + "firewall-cmd --get-log-denied", + "firewall-cmd --list-all-zones", "firewall-cmd --permanent --list-all-zones", "firewall-cmd --permanent --direct --get-all-chains", "firewall-cmd --permanent --direct --get-all-rules", "firewall-cmd --permanent --direct --get-all-passthroughs", - "firewall-cmd --state", - "firewall-cmd --get-log-denied" + "firewall-cmd --state" ], timeout=10, cmd_as_tag=True) # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/firewall_tables.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/firewall_tables.py --- sosreport-4.4/sos/report/plugins/firewall_tables.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/firewall_tables.py 2023-05-26 22:32:49.000000000 +0530 @@ -10,11 +10,23 @@ class firewall_tables(Plugin, IndependentPlugin): + """Collects information about local firewall tables, such as iptables, + and nf_tables (via nft). Note that this plugin does _not_ collect firewalld + information, which is handled by a separate plugin. + + Collections from this plugin are largely gated byt the presence of relevant + kernel modules - for example, the plugin will not collect the nf_tables + ruleset if both the `nf_tables` and `nfnetlink` kernel modules are not + currently loaded (unless using the --allow-system-changes option). 
+ """ short_desc = 'firewall tables' plugin_name = "firewall_tables" profiles = ('network', 'system') + files = ('/etc/nftables',) + kernel_mods = ('ip_tables', 'ip6_tables', 'nf_tables', 'nfnetlink', + 'ebtables') def collect_iptable(self, tablename): """ Collecting iptables rules for a table loads either kernel module diff -Nru sosreport-4.4/sos/report/plugins/foreman_installer.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/foreman_installer.py --- sosreport-4.4/sos/report/plugins/foreman_installer.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/foreman_installer.py 2023-05-26 22:32:49.000000000 +0530 @@ -56,8 +56,8 @@ # also hide passwords in yet different formats self.do_path_regex_sub( install_logs, - r"(\.|_|-)password(=\'|=|\", \")(\w*)", - r"\1password\2********") + r"((\.|_|-)password(=\'|=|\", \"))(\w*)", + r"\1********") self.do_path_regex_sub( "/var/log/foreman-installer/foreman-proxy*", r"(\s*proxy_password\s=) (.*)", @@ -88,10 +88,10 @@ def setup(self): self.add_file_tags({ - '/var/log/foreman-installer/satellite.log.*': - ['insights_satellite_log' 'satellite_installer_log'], - '/var/log/foreman-installer/capsule.log.*': - ['insights_capsule_log' 'capsule_installer_log'], + '/var/log/foreman-installer/satellite.log': + ['foreman_satellite_log' 'satellite_installer_log'], + '/var/log/foreman-installer/capsule.log': + ['capsule_log' 'capsule_installer_log'], }) super(RedHatForemanInstaller, self).setup() diff -Nru sosreport-4.4/sos/report/plugins/foreman_openscap.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/foreman_openscap.py --- sosreport-4.4/sos/report/plugins/foreman_openscap.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/foreman_openscap.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,26 @@ +# Copyright (C) 2023 Red Hat, Inc., Pavel Moravec + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted 
material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos.report.plugins import Plugin, IndependentPlugin + + +class ForemanOpenSCAP(Plugin, IndependentPlugin): + + short_desc = 'Foreman OpenSCAP client' + + plugin_name = 'foreman_openscap' + profiles = ('sysmgmt',) + + packages = ('rubygem-foreman_scap_client', 'ruby-foreman-scap-client') + + def setup(self): + self.add_copy_spec("/etc/foreman_scap_client/config.yaml") + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/foreman_proxy.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/foreman_proxy.py --- sosreport-4.4/sos/report/plugins/foreman_proxy.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/foreman_proxy.py 2023-05-26 22:32:49.000000000 +0530 @@ -22,7 +22,7 @@ def setup(self): self.add_file_tags({ - '/var/log/foreman-proxy/proxy.log.*': 'foreman_proxy_log', + '/var/log/foreman-proxy/proxy.log': 'foreman_proxy_log', '/etc/foreman-proxy/settings.yml': 'foreman_proxy_conf' }) diff -Nru sosreport-4.4/sos/report/plugins/foreman.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/foreman.py --- sosreport-4.4/sos/report/plugins/foreman.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/foreman.py 2023-05-26 22:32:49.000000000 +0530 @@ -72,7 +72,9 @@ self.add_file_tags({ '/var/log/foreman/production.log.*': 'foreman_production_log', '/etc/sysconfig/foreman-tasks': 'foreman_tasks_config', - '/etc/sysconfig/dynflowd': 'foreman_tasks_config' + '/etc/sysconfig/dynflowd': 'foreman_tasks_config', + '/var/log/httpd/foreman-ssl_access_ssl.log': + 'foreman_ssl_access_ssl_log' }) self.add_forbidden_path([ @@ -118,7 +120,6 @@ ]) self.add_cmd_output([ - 'hammer ping', 'foreman-selinux-relabel -nv', 
'passenger-status --show pool', 'passenger-status --show requests', @@ -130,6 +131,14 @@ 'ping -c1 -W1 %s' % _host_f, 'ping -c1 -W1 localhost' ]) + self.add_cmd_output( + 'qpid-stat -b amqps://localhost:5671 -q \ + --ssl-certificate=/etc/pki/katello/qpid_router_client.crt \ + --ssl-key=/etc/pki/katello/qpid_router_client.key \ + --sasl-mechanism=ANONYMOUS', + suggest_filename='qpid-stat_-q' + ) + self.add_cmd_output("hammer ping", tags="hammer_ping") # Dynflow Sidekiq self.add_cmd_output('systemctl list-units dynflow*', diff -Nru sosreport-4.4/sos/report/plugins/frr.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/frr.py --- sosreport-4.4/sos/report/plugins/frr.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/frr.py 2023-05-26 22:32:49.000000000 +0530 @@ -10,6 +10,14 @@ class Frr(Plugin, RedHatPlugin): + """ + FRR is a routing project that provides numerous traditional routing + protocols for Linux platforms. In particular, OpenStack uses FRR to provide + BGP functionality for the overcloud nodes. + + This plugin is primarily designed the deployment of FRR within OSP + environments, which deploy FRR in a container. 
+ """ short_desc = 'Frr routing service' @@ -18,8 +26,30 @@ files = ('/etc/frr/zebra.conf',) packages = ('frr',) + containers = ('frr',) def setup(self): self.add_copy_spec("/etc/frr/") + if self.container_exists('frr'): + subcmds = [ + 'show bgp detail', + 'show bgp neighbors', + 'show bgp summary', + 'show history', + 'show ip bgp detail', + 'show ip bgp neighbors', + 'show ip bgp summary', + 'show ip bgp', + 'show ip route', + 'show ipv6 route', + 'show running-config', + 'show version', + ] + + self.add_cmd_output( + [f"vtysh -c '{subcmd}'" for subcmd in subcmds], + container='frr' + ) + # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/fwupd.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/fwupd.py --- sosreport-4.4/sos/report/plugins/fwupd.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/fwupd.py 2023-05-26 22:32:49.000000000 +0530 @@ -34,14 +34,14 @@ def postproc(self): self.do_path_regex_sub( "/etc/fwupd/remotes.d/*", - r"Password=(.*)", - r"Password=********" + r"(Password)=(.*)", + r"\1=********" ) self.do_file_sub( "/etc/fwupd/redfish.conf", - r"Password=(.*)", - r"Password=********" + r"(Password)=(.*)", + r"\1=********" ) diff -Nru sosreport-4.4/sos/report/plugins/gcp.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/gcp.py --- sosreport-4.4/sos/report/plugins/gcp.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/gcp.py 2023-05-26 22:32:49.000000000 +0530 @@ -59,20 +59,19 @@ # Capture gcloud auth list self.add_cmd_output("gcloud auth list", tags=['gcp']) - # Get and store Metadata - try: - self.metadata = self.get_metadata() - self.scrub_metadata() - self.add_string_as_file(json.dumps(self.metadata, indent=4), - "metadata.json", plug_dir=True, - tags=['gcp']) - except RuntimeError as err: - self.add_string_as_file(str(err), 'metadata.json', - plug_dir=True, tags=['gcp']) - # Add journal entries self.add_journal(units="google*", tags=['gcp']) 
+ def collect(self): + # Get and store Metadata + with self.collection_file('metadata.json', tags=['gcp']) as mfile: + try: + self.metadata = self.get_metadata() + self.scrub_metadata() + mfile.write(json.dumps(self.metadata, indent=4)) + except RuntimeError as err: + mfile.write(str(err)) + def get_metadata(self) -> dict: """ Retrieves metadata from the Metadata Server and transforms it into a diff -Nru sosreport-4.4/sos/report/plugins/gluster.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/gluster.py --- sosreport-4.4/sos/report/plugins/gluster.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/gluster.py 2023-05-26 22:32:49.000000000 +0530 @@ -61,11 +61,10 @@ "/var/lib/glusterd/glusterfind/glusterfind_*_secret.pem" ) - self.add_cmd_output([ - "gluster peer status", - "gluster pool list", - "gluster volume status" - ]) + self.add_cmd_output("gluster peer status", tags="gluster_peer_status") + self.add_cmd_output("gluster pool list") + self.add_cmd_output("gluster volume status", + tags="gluster_v_status") self.add_copy_spec([ "/etc/redhat-storage-release", @@ -104,14 +103,15 @@ self.soslog.info("could not send SIGUSR1 to glusterfs/" "glusterd processes") else: - self.soslog.warn("Unable to generate statedumps, no such " - "directory: %s" % self.statedump_dir) + self.soslog.warning("Unable to generate statedumps, no such " + "directory: %s" % self.statedump_dir) state = self.exec_cmd("gluster get-state") if state['status'] == 0: state_file = state['output'].split()[-1] self.add_copy_spec(state_file) - volume_cmd = self.collect_cmd_output("gluster volume info") + volume_cmd = self.collect_cmd_output("gluster volume info", + tags="gluster_v_info") if volume_cmd['status'] == 0: for line in volume_cmd['output'].splitlines(): if not line.startswith("Volume Name:"): diff -Nru sosreport-4.4/sos/report/plugins/gnocchi.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/gnocchi.py --- 
sosreport-4.4/sos/report/plugins/gnocchi.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/gnocchi.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,117 +0,0 @@ -# Copyright (C) 2016 Red Hat, Inc., Sachin Patil -# Copyright (C) 2017 Red Hat, Inc., Martin Schuppert - -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. - -import os -from sos.report.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin - - -class Gnocchi(Plugin): - - short_desc = 'Gnocchi - Metric as a service' - plugin_name = "gnocchi" - - profiles = ('openstack', 'openstack_controller') - - def setup(self): - self.add_copy_spec([ - "/etc/gnocchi/*", - ]) - - if self.get_option("all_logs"): - self.add_copy_spec([ - "/var/log/gnocchi/*", - "/var/log/{}*/gnocchi*".format(self.apachepkg) - ]) - else: - self.add_copy_spec([ - "/var/log/gnocchi/*.log", - "/var/log/{}*/gnocchi*.log".format(self.apachepkg) - ]) - - vars_all = [p in os.environ for p in [ - 'OS_USERNAME', 'OS_PASSWORD']] - - vars_any = [p in os.environ for p in [ - 'OS_TENANT_NAME', 'OS_PROJECT_NAME']] - - if not (all(vars_all) and any(vars_any)): - self.soslog.warning("Not all environment variables set. 
Source " - "the environment file for the user intended " - "to connect to the OpenStack environment.") - else: - self.add_cmd_output([ - "gnocchi --version", - "gnocchi status", - "gnocchi capabilities list", - "gnocchi archive-policy list", - "gnocchi resource list", - "gnocchi resource-type list" - ]) - - def postproc(self): - self.do_file_sub( - "/etc/gnocchi/gnocchi.conf", - r"ceph_secret\s?=(.*)", - r"ceph_secret=*****", - ) - self.do_file_sub( - "/etc/gnocchi/gnocchi.conf", - r"password\s?=(.*)", - r"password=*****", - ) - self.do_file_sub( - "/etc/gnocchi/gnocchi.conf", - r"memcache_secret_key\s?=(.*)", - r"memcache_secret_key=*****", - ) - - -class RedHatGnocchi(Gnocchi, RedHatPlugin): - - apachepkg = 'httpd' - var_puppet_gen = "/var/lib/config-data/puppet-generated/gnocchi" - - packages = ( - 'openstack-gnocchi-metricd', 'openstack-gnocchi-common', - 'openstack-gnocchi-statsd', 'openstack-gnocchi-api', - 'openstack-gnocchi-carbonara' - ) - - def setup(self): - super(RedHatGnocchi, self).setup() - self.add_copy_spec([ - self.var_puppet_gen + "/etc/gnocchi/*", - self.var_puppet_gen + "/etc/httpd/conf/*", - self.var_puppet_gen + "/etc/httpd/conf.d/*", - self.var_puppet_gen + "/etc/httpd/conf.modules.d/wsgi.conf", - self.var_puppet_gen + "/etc/my.cnf.d/tripleo.cnf" - ]) - - def postproc(self): - super(RedHatGnocchi, self).postproc() - self.do_file_sub( - self.var_puppet_gen + "/etc/gnocchi/" - "gnocchi.conf", - r"password=(.*)", - r"password=*****", - ) - - -class DebianGnocchi(Gnocchi, DebianPlugin, UbuntuPlugin): - - apachepkg = 'apache' - - packages = ( - 'gnocchi-api', 'gnocchi-metricd', 'gnocchi-common' - 'gnocchi-statsd', 'python3-gnocchiclient' - ) - -# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/grub2.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/grub2.py --- sosreport-4.4/sos/report/plugins/grub2.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/grub2.py 2023-05-26 
22:32:49.000000000 +0530 @@ -21,7 +21,8 @@ self.add_file_tags({ '/boot/grub2/grub.cfg': 'grub2_cfg', - '/boot/efi/.*/grub.cfg': 'grub2_efi_cfg' + '/boot/efi/.*/grub.cfg': 'grub2_efi_cfg', + '/boot/grub2/grubenv': 'grubenv' }) self.add_copy_spec([ diff -Nru sosreport-4.4/sos/report/plugins/hardware.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/hardware.py --- sosreport-4.4/sos/report/plugins/hardware.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/hardware.py 2023-05-26 22:32:49.000000000 +0530 @@ -30,7 +30,8 @@ "/sys/class/drm/*/edid" ]) - self.add_cmd_output("dmidecode", root_symlink="dmidecode") + self.add_cmd_output("dmidecode", root_symlink="dmidecode", + tags="dmidecode") self.add_cmd_output("lshw") diff -Nru sosreport-4.4/sos/report/plugins/host.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/host.py --- sosreport-4.4/sos/report/plugins/host.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/host.py 2023-05-26 22:32:49.000000000 +0530 @@ -30,14 +30,14 @@ self.add_forbidden_path('/etc/sos/cleaner') - self.add_cmd_output('hostname', root_symlink='hostname') - self.add_cmd_output('uptime', root_symlink='uptime') - + self.add_cmd_output('hostname', root_symlink='hostname', + tags=['hostname_default', 'hostname_short']) + self.add_cmd_output('hostname -f', tags='hostname') + self.add_cmd_output('uptime', root_symlink='uptime', tags="uptime") self.add_cmd_output('find / -maxdepth 2 -type l -ls', root_symlink='root-symlinks') self.add_cmd_output([ - 'hostname -f', 'hostid', 'hostnamectl status' ]) diff -Nru sosreport-4.4/sos/report/plugins/hpssm.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/hpssm.py --- sosreport-4.4/sos/report/plugins/hpssm.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/hpssm.py 2023-05-26 22:32:49.000000000 +0530 @@ -34,10 +34,12 @@ 'ctrl all show status' ] slot_subcmds = [ + 'array all show 
detail', 'ld all show', 'ld all show detail', 'pd all show', - 'pd all show detail' + 'pd all show detail', + 'show detail' ] self.add_cmd_output( ["%s %s" % (cmd, subcmd) for subcmd in subcmds] diff -Nru sosreport-4.4/sos/report/plugins/i18n.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/i18n.py --- sosreport-4.4/sos/report/plugins/i18n.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/i18n.py 2023-05-26 22:32:49.000000000 +0530 @@ -21,6 +21,6 @@ "/etc/X11/xinit/xinput.d/*", "/etc/locale.conf" ]) - self.add_cmd_output("locale", env={'LC_ALL': None}) + self.add_cmd_output("locale", env={'LC_ALL': None}, tags="locale") # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/__init__.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/__init__.py --- sosreport-4.4/sos/report/plugins/__init__.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -16,7 +16,8 @@ listdir, path_join, bold, file_is_binary, recursive_dict_values_by_key) -from sos.archive import P_FILE +from sos.archive import P_FILE, P_LINK +import contextlib import os import glob import re @@ -562,6 +563,7 @@ self.skip_files = commons['cmdlineopts'].skip_files self.skip_commands = commons['cmdlineopts'].skip_commands self.default_environment = {} + self._tail_files_list = [] self.soslog = self.commons['soslog'] if 'soslog' in self.commons \ else logging.getLogger('sos') @@ -614,6 +616,7 @@ self.manifest.add_list('files', []) self.manifest.add_field('strings', {}) self.manifest.add_field('containers', {}) + self.manifest.add_list('collections', []) def set_default_cmd_environment(self, env_vars): """ @@ -1281,7 +1284,8 @@ content = readable.read() if not isinstance(content, str): content = content.decode('utf8', 'ignore') - result, replacements = re.subn(regexp, subst, content) + result, replacements = re.subn(regexp, subst, content, + flags=re.IGNORECASE) 
if replacements: self.archive.add_string(result, srcpath) else: @@ -1558,8 +1562,8 @@ """ global_options = ( - 'all_logs', 'allow_system_changes', 'cmd_timeout', 'log_size', - 'plugin_timeout', 'since', 'verify' + 'all_logs', 'allow_system_changes', 'cmd_timeout', 'journal_size', + 'log_size', 'plugin_timeout', 'since', 'verify' ) if optionname in global_options: @@ -1602,10 +1606,11 @@ :returns: The tag(s) associated with `fname` :rtype: ``list`` of strings """ + tags = [] for key, val in self.filetags.items(): if re.match(key, fname): - return val - return [] + tags.extend(val) + return tags def generate_copyspec_tags(self): """After file collections have completed, retroactively generate @@ -1620,16 +1625,16 @@ matched_files = [] for cfile in self.copied_files: if re.match(file_regex, cfile['srcpath']): - matched_files.append(cfile['dstpath']) + matched_files.append(cfile['dstpath'].lstrip('/')) if matched_files: manifest_data['files_copied'] = matched_files self.manifest.files.append(manifest_data) def add_copy_spec(self, copyspecs, sizelimit=None, maxage=None, tailit=True, pred=None, tags=[], container=None): - """Add a file, directory, or regex matching filepaths to the archive + """Add a file, directory, or globs matching filepaths to the archive - :param copyspecs: A file, directory, or regex matching filepaths + :param copyspecs: Files, directories, or globs matching filepaths :type copyspecs: ``str`` or a ``list`` of strings :param sizelimit: Limit the total size of collections from `copyspecs` @@ -1702,12 +1707,15 @@ """Generate a tag to add for a single file copyspec This tag will be set to the filename, minus any extensions - except '.conf' which will be converted to '_conf' + except for special extensions like .conf or .log, which will be + mangled to _conf or similar. 
""" - fname = fname.replace('-', '_') - if fname.endswith('.conf'): - return fname.replace('.', '_') - return fname.split('.')[0] + if fname.startswith(('/proc', '/sys')): + return + _fname = fname.split('/')[-1] + _fname = _fname.replace('-', '_') + if _fname.endswith(('.conf', '.log', '.txt')): + return _fname.replace('.', '_') for copyspec in copyspecs: if not (copyspec and len(copyspec)): @@ -1724,7 +1732,10 @@ _spec_tags = [] if len(files) == 1: - _spec_tags = [get_filename_tag(files[0].split('/')[-1])] + _spec = get_filename_tag(files[0]) + if _spec: + _spec_tags.append(_spec) + _spec_tags.extend(self.get_tags_for_file(files[0])) _spec_tags.extend(tags) _spec_tags = list(set(_spec_tags)) @@ -1772,7 +1783,7 @@ # operations continue - # Files hould be sorted in most-recently-modified order, so that + # Files should be sorted in most-recently-modified order, so that # we collect the newest data first before reaching the limit. def getmtime(path): try: @@ -1836,24 +1847,21 @@ if sizelimit and current_size > sizelimit: limit_reached = True - if tailit and not file_is_binary(_file): - self._log_info("collecting tail of '%s' due to size " - "limit" % _file) - file_name = _file - if file_name[0] == os.sep: - file_name = file_name.lstrip(os.sep) - strfile = ( - file_name.replace(os.path.sep, ".") + ".tailed" + if tailit: + if file_is_binary(_file): + self._log_info( + "File '%s' is over size limit and is binary. " + "Skipping collection." % _file + ) + continue + + self._log_info( + "File '%s' is over size limit, will instead tail " + "the file during collection phase." 
% _file ) add_size = sizelimit + file_size - current_size - self.add_string_as_file(tail(_file, add_size), strfile) - rel_path = os.path.relpath('/', os.path.dirname(_file)) - link_path = os.path.join(rel_path, 'sos_strings', - self.name(), strfile) - self.archive.add_link(link_path, _file) + self._tail_files_list.append((_file, add_size)) _manifest_files.append(_file.lstrip('/')) - else: - self._log_info("skipping '%s' over size limit" % _file) else: # size limit not exceeded, copy the file _manifest_files.append(_file.lstrip('/')) @@ -2312,7 +2320,6 @@ tags = [tags] _tags.extend(tags) - _tags.append(cmd.split(' ')[0]) _tags.extend(self.get_tags_for_cmd(cmd)) if cmd_as_tag: @@ -2639,8 +2646,11 @@ cmdfn = self._mangle_command(cmd) conlnk = "%s/%s" % (_cdir, cmdfn) - self.archive.check_path(conlnk, P_FILE) - os.symlink(_outloc, self.archive.dest_path(conlnk)) + # If check_path return None, it means that the sym link already exits, + # so to avoid Error 17, trying to recreate, we will skip creation and + # trust on the existing sym link (e.g. 
duplicate command) + if self.archive.check_path(conlnk, P_LINK): + os.symlink(_outloc, self.archive.dest_path(conlnk)) manifest['filepath'] = conlnk self.manifest.containers[container]['commands'].append(manifest) @@ -2923,13 +2933,11 @@ identifier_opt = " --identifier %s" catalog_opt = " --catalog" - journal_size = 100 - all_logs = self.get_option("all_logs") - log_size = sizelimit or self.get_option("log_size") - log_size = max(log_size, journal_size) if not all_logs else 0 - if sizelimit == 0: + if sizelimit == 0 or self.get_option("all_logs"): # allow for specific sizelimit overrides in plugins log_size = 0 + else: + log_size = sizelimit or self.get_option('journal_size') if isinstance(units, str): units = [units] @@ -3076,6 +3084,21 @@ self._log_info("collecting output of '%s'" % soscmd.cmd) self._collect_cmd_output(**soscmd.__dict__) + def _collect_tailed_files(self): + for _file, _size in self._tail_files_list: + self._log_info(f"collecting tail of '{_file}' due to size limit") + file_name = _file + if file_name[0] == os.sep: + file_name = file_name.lstrip(os.sep) + strfile = ( + file_name.replace(os.path.sep, ".") + ".tailed" + ) + self.add_string_as_file(tail(_file, _size), strfile) + rel_path = os.path.relpath('/', os.path.dirname(_file)) + link_path = os.path.join(rel_path, 'sos_strings', + self.name(), strfile) + self.archive.add_link(link_path, _file) + def _collect_strings(self): for string, file_name, tags in self.copy_strings: if self._timeout_hit: @@ -3092,13 +3115,80 @@ self._log_debug("could not add string '%s': %s" % (file_name, e)) + def _collect_manual(self): + """Kick off manual collections performed by the plugin. These manual + collections are anything the plugin collects outside of existing + files and/or command output. Anything the plugin manually compiles or + constructs for data that is included in the final archive. 
+ + Plugins will need to define these collections by overriding the + ``collect()`` method, similar to how plugins define their own + ``setup()`` methods. + """ + try: + self.collect() + except Exception as err: + self._log_error(f"Error during plugin collections: {err}") + def collect(self): + """If a plugin needs to manually compile some data for a collection, + that should be specified here by overriding this method. + + These collections are run last during a plugin's execution, and as such + are more likely to be interrupted by timeouts than file or command + output collections. + """ + pass + + @contextlib.contextmanager + def collection_file(self, fname, subdir=None, tags=[]): + """Handles creating and managing files within a plugin's subdirectory + within the archive, and is intended to be used to save manually + compiled data generated during a plugin's ``_collect_manual()`` step + of the collection phase. + + Plugins should call this method using a ``with`` context manager. + + :param fname: The name of the file within the plugin directory + :type fname: ``str`` + + :param subdir: If needed, specify a subdir to write the file to + :type subdir: ``str`` + + :param tags: Tags to be added to this file in the manifest + :type tags: ``str`` or ``list`` of ``str``s + """ + try: + start = time() + _pfname = self._make_command_filename(fname, subdir=subdir) + self.archive.check_path(_pfname, P_FILE) + _name = self.archive.dest_path(_pfname) + _file = open(_name, 'w') + self._log_debug(f"manual collection file opened: {_name}") + yield _file + _file.close() + end = time() + run = end - start + self._log_info(f"manual collection '{fname}' finished in {run}") + if isinstance(tags, str): + tags = [tags] + self.manifest.collections.append({ + 'name': fname, + 'filepath': _pfname, + 'tags': tags + }) + except Exception as err: + self._log_info(f"Error with collection file '{fname}': {err}") + + def collect_plugin(self): """Collect the data for a plugin.""" start = 
time() self._collect_copy_specs() self._collect_container_copy_specs() - self._collect_cmds() + self._collect_tailed_files() self._collect_strings() + self._collect_cmds() + self._collect_manual() fields = (self.name(), time() - start) self._log_debug("collected plugin '%s' in %s" % fields) @@ -3223,6 +3313,9 @@ if self.is_service(service): self.add_service_status(service) self.add_journal(service) + for kmod in self.kernel_mods: + if self.is_module_loaded(kmod): + self.add_cmd_output(f"modinfo {kmod}") def setup(self): """Collect the list of files declared by the plugin. This method diff -Nru sosreport-4.4/sos/report/plugins/insights.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/insights.py --- sosreport-4.4/sos/report/plugins/insights.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/insights.py 2023-05-26 22:32:49.000000000 +0530 @@ -28,6 +28,8 @@ config = ( '/etc/insights-client/insights-client.conf', '/etc/insights-client/.registered', + '/etc/insights-client/tags.yaml', + '/etc/insights-client/malware-detection-config.yml', '/etc/redhat-access-insights/redhat-access-insights.conf' ) diff -Nru sosreport-4.4/sos/report/plugins/ipa.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ipa.py --- sosreport-4.4/sos/report/plugins/ipa.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ipa.py 2023-05-26 22:32:49.000000000 +0530 @@ -162,10 +162,17 @@ getcert_pred = SoSPredicate(self, services=['certmonger']) - self.add_cmd_output("getcert list", pred=getcert_pred) + self.add_cmd_output("getcert list", pred=getcert_pred, + tags="getcert_list") for certdb_directory in glob("/etc/dirsrv/slapd-*/"): self.add_cmd_output("certutil -L -d %s" % certdb_directory) + + self.add_file_tags({ + "/var/log/ipa/healthcheck/healthcheck.log": + "freeipa_healthcheck_log" + }) + return def postproc(self): diff -Nru sosreport-4.4/sos/report/plugins/iprconfig.py 
sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/iprconfig.py --- sosreport-4.4/sos/report/plugins/iprconfig.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/iprconfig.py 2023-05-26 22:32:49.000000000 +0530 @@ -9,7 +9,7 @@ # This plugin enables collection of logs for Power systems import re -from sos.report.plugins import Plugin, IndependentPlugin +from sos.report.plugins import Plugin, IndependentPlugin, SoSPredicate class IprConfig(Plugin, IndependentPlugin): @@ -22,6 +22,13 @@ def setup(self): + show_ioas = self.collect_cmd_output( + "iprconfig -c show-ioas", + pred=SoSPredicate(self, kmods=['sg']) + ) + if not show_ioas['status'] == 0: + return + self.add_cmd_output([ "iprconfig -c show-config", "iprconfig -c show-alt-config", @@ -35,10 +42,6 @@ "iprconfig -c dump" ]) - show_ioas = self.collect_cmd_output("iprconfig -c show-ioas") - if not show_ioas['status'] == 0: - return - devices = [] if show_ioas['output']: p = re.compile('sg') diff -Nru sosreport-4.4/sos/report/plugins/iscsi.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/iscsi.py --- sosreport-4.4/sos/report/plugins/iscsi.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/iscsi.py 2023-05-26 22:32:49.000000000 +0530 @@ -41,11 +41,18 @@ # to # # node.session.auth.password = ******** - nodesessionpwd = r"(node\.session\.auth\.password\s+=\s+)(\S+)" - discoverypwd = r"(discovery\.sendtargets\.auth\.password\s+=\s+)(\S+)" + nodesessionpwd = r"(node\.session\.auth\.password(_in)?\s+=\s+)(\S+)" + discoverypwd = ( + r"(discovery\.sendtargets\.auth\.password(_in)?" 
+ r"\s+=\s+)(\S+)" + ) repl = r"\1********\n" self.do_path_regex_sub('/etc/iscsi/iscsid.conf', nodesessionpwd, repl) self.do_path_regex_sub('/etc/iscsi/iscsid.conf', discoverypwd, repl) + self.do_path_regex_sub( + '/var/lib/iscsi/nodes/*/*/*', nodesessionpwd, repl) + self.do_path_regex_sub( + '/var/lib/iscsi/nodes/*/*/*', discoverypwd, repl) class RedHatIscsi(Iscsi, RedHatPlugin): diff -Nru sosreport-4.4/sos/report/plugins/juju.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/juju.py --- sosreport-4.4/sos/report/plugins/juju.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/juju.py 2023-05-26 22:32:49.000000000 +0530 @@ -44,7 +44,8 @@ # want all logs you want this too. self.add_copy_spec([ "/var/log/juju", - "/var/lib/juju" + "/var/lib/juju", + "/var/lib/juju/**/.*", ]) self.add_forbidden_path("/var/lib/juju/kvm") else: diff -Nru sosreport-4.4/sos/report/plugins/kdump.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/kdump.py --- sosreport-4.4/sos/report/plugins/kdump.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/kdump.py 2023-05-26 22:32:49.000000000 +0530 @@ -7,7 +7,8 @@ # See the LICENSE file in the source distribution for further information. 
import platform -from sos.report.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin +from sos.report.plugins import Plugin, PluginOpt, RedHatPlugin, DebianPlugin, \ + UbuntuPlugin, CosPlugin class KDump(Plugin): @@ -29,10 +30,12 @@ "/sys/kernel/fadump_registered", "/sys/kernel/fadump/registered", "/sys/kernel/fadump/mem_reserved", - "/sys/kernel/kexec_crash_loaded", "/sys/kernel/kexec_crash_size" ]) + self.add_copy_spec("/sys/kernel/kexec_crash_loaded", + tags="kexec_crash_loaded") + class RedHatKDump(KDump, RedHatPlugin): @@ -70,10 +73,11 @@ self.add_copy_spec([ "/etc/kdump.conf", "/etc/udev/rules.d/*kexec.rules", - "/var/crash/*/vmcore-dmesg.txt", "/var/crash/*/kexec-dmesg.log", "/var/log/kdump.log" ]) + self.add_copy_spec("/var/crash/*/vmcore-dmesg.txt", + tags="vmcore_dmesg") try: path = self.read_kdump_conffile() except Exception: @@ -102,4 +106,18 @@ "/etc/default/kdump-tools" ]) + +class CosKDump(KDump, CosPlugin): + + option_list = [ + PluginOpt(name="collect-kdumps", default=False, + desc="Collect existing kdump files"), + ] + + def setup(self): + super(CosKDump, self).setup() + self.add_cmd_output('ls -alRh /var/kdump*') + if self.get_option("collect-kdumps"): + self.add_copy_spec(["/var/kdump-*"]) + # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/kernel.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/kernel.py --- sosreport-4.4/sos/report/plugins/kernel.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/kernel.py 2023-05-26 22:32:49.000000000 +0530 @@ -43,8 +43,8 @@ def setup(self): # compat - self.add_cmd_output("uname -a", root_symlink="uname") - self.add_cmd_output("lsmod", root_symlink="lsmod") + self.add_cmd_output("uname -a", root_symlink="uname", tags="uname") + self.add_cmd_output("lsmod", root_symlink="lsmod", tags="lsmod") self.add_cmd_output("ls -lt /sys/kernel/slab") try: @@ -70,9 +70,9 @@ self.add_cmd_output([ "dmesg", - "sysctl -a", "dkms status" ], 
cmd_as_tag=True) + self.add_cmd_output("sysctl -a", tags="sysctl") clocksource_path = "/sys/devices/system/clocksource/clocksource0/" @@ -111,6 +111,9 @@ "/etc/modules.conf", "/etc/modprobe.conf", "/etc/modprobe.d", + "/lib/modprobe.d", + "/run/modprobe.d", + "/usr/local/lib/modprobe.d", "/etc/sysctl.conf", "/etc/sysctl.d", "/lib/sysctl.d", @@ -122,6 +125,7 @@ "/proc/misc", "/var/log/dmesg", "/sys/fs/pstore", + "/var/lib/systemd/pstore", "/sys/kernel/debug/dynamic_debug/control", "/sys/kernel/debug/extfrag/unusable_index", "/sys/kernel/debug/extfrag/extfrag_index", diff -Nru sosreport-4.4/sos/report/plugins/krb5.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/krb5.py --- sosreport-4.4/sos/report/plugins/krb5.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/krb5.py 2023-05-26 22:32:49.000000000 +0530 @@ -8,37 +8,44 @@ # # See the LICENSE file in the source distribution for further information. -from sos.report.plugins import Plugin, RedHatPlugin +from sos.report.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin class Krb5(Plugin): + """This plugin handles the collection of kerberos authentication config + files and logging. Users should expect to see their krb5 config(s) in the + final archive, along with krb5 logging and `klist` output. + + kdc configs and acls will also be collected from the distribution-spcecific + kdc directory. 
+ """ short_desc = 'Kerberos authentication' plugin_name = 'krb5' profiles = ('identity', 'system') - packages = ('krb5-libs', 'krb5-user') - - # This is Debian's default, which is closest to upstream's - kdcdir = "/var/lib/krb5kdc" def setup(self): self.add_copy_spec([ "/etc/krb5.conf", "/etc/krb5.conf.d/*", - "%s/kadm5.acl" % self.kdcdir, - "%s/kdc.conf" % self.kdcdir, - "/var/log/krb5kdc.log", + f"{self.kdcdir}/kadm5.acl", + f"{self.kdcdir}/kdc.conf", "/var/log/kadmind.log" ]) - self.add_cmd_output("klist -ket %s/.k5*" % self.kdcdir) + self.add_copy_spec("/var/log/krb5kdc.log", tags="kerberos_kdc_log") + self.add_cmd_output(f"klist -ket {self.kdcdir}/.k5*") self.add_cmd_output("klist -ket /etc/krb5.keytab") class RedHatKrb5(Krb5, RedHatPlugin): - def setup(self): - self.kdcdir = "/var/kerberos/krb5kdc" - super(RedHatKrb5, self).setup() + packages = ('krb5-libs', 'krb5-server') + kdcdir = "/var/kerberos/krb5kdc" +class UbuntuKrb5(Krb5, DebianPlugin, UbuntuPlugin): + + packages = ('krb5-kdc', 'krb5-config', 'krb5-user') + kdcdir = "/var/lib/krb5kdc" + # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/landscape.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/landscape.py --- sosreport-4.4/sos/report/plugins/landscape.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/landscape.py 2023-05-26 22:32:49.000000000 +0530 @@ -29,9 +29,15 @@ ]) if not self.get_option("all_logs"): - self.add_copy_spec("/var/log/landscape/*.log") + self.add_copy_spec([ + "/var/log/landscape/*.log", + "/var/log/landscape-server/*.log", + ]) else: - self.add_copy_spec("/var/log/landscape") + self.add_copy_spec([ + "/var/log/landscape", + "/var/log/landscape-server" + ]) self.add_cmd_output([ "gpg --verify /etc/landscape/license.txt", @@ -45,55 +51,17 @@ r"registration_password(.*)", r"registration_password[********]" ) - self.do_file_sub( - "/etc/landscape/service.conf", - r"password = (.*)", - r"password = [********]" - ) - 
self.do_file_sub( - "/etc/landscape/service.conf", - r"store_password = (.*)", - r"store_password = [********]" - ) - self.do_file_sub( - "/etc/landscape/service.conf", - r"secret-token = (.*)", - r"secret-token = [********]" - ) - self.do_file_sub( - "/etc/landscape/service.conf", - r"oidc-client-secret = (.*)", - r"oidc-client-secret = [********]" - ) - self.do_file_sub( - "/etc/landscape/service.conf", - r"oidc-client-id = (.*)", - r"oidc-client-id = [********]" - ) - self.do_file_sub( - "/etc/landscape/service.conf.old", - r"password = (.*)", - r"password = [********]" - ) - self.do_file_sub( - "/etc/landscape/service.conf.old", - r"store_password = (.*)", - r"store_password = [********]" - ) - self.do_file_sub( - "/etc/landscape/service.conf.old", - r"secret-token = (.*)", - r"secret-token = [********]" - ) - self.do_file_sub( - "/etc/landscape/service.conf.old", - r"oidc-client-secret = (.*)", - r"oidc-client-secret = [********]" - ) - self.do_file_sub( - "/etc/landscape/service.conf.old", - r"oidc-client-id = (.*)", - r"oidc-client-id = [********]" + keys = [ + "password", + "store_password", + "secret-token", + "oidc-client-secret", + "oidc-client-id", + ] + self.do_path_regex_sub( + "/etc/landscape/service.conf*", + r"(%s) = (.*)" % "|".join(keys), + r"\1 = [********]" ) # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/ldap.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ldap.py --- sosreport-4.4/sos/report/plugins/ldap.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ldap.py 2023-05-26 22:32:49.000000000 +0530 @@ -51,16 +51,12 @@ def postproc(self): super(RedHatLdap, self).postproc() - self.do_file_sub( - "/etc/nslcd.conf", - r"(\s*bindpw\s*)\S+", - r"\1********" - ) - self.do_file_sub( - "/etc/pam_ldap.conf", - r"(\s*bindpw\s*)\S+", - r"\1********" - ) + for f in ["/etc/nslcd.conf", "/etc/pam_ldap.conf"]: + self.do_file_sub( + f, + r"(\s*bindpw\s*)\S+", + r"\1********" + ) class 
DebianLdap(Ldap, DebianPlugin, UbuntuPlugin): diff -Nru sosreport-4.4/sos/report/plugins/libvirt.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/libvirt.py --- sosreport-4.4/sos/report/plugins/libvirt.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/libvirt.py 2023-05-26 22:32:49.000000000 +0530 @@ -73,6 +73,11 @@ for pf in ["environ", "cgroup", "maps", "numa_maps", "limits"]: self.add_copy_spec("/proc/%s/%s" % (pid, pf)) + self.add_file_tags({ + "/run/libvirt/qemu/*.xml": "var_qemu_xml", + "/var/log/libvirt/qemu/*.log": "libvirtd_qemu_log" + }) + def postproc(self): match_exp = r"(\s*passwd=\s*')([^']*)('.*)" libvirt_path_exps = [ diff -Nru sosreport-4.4/sos/report/plugins/logs.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/logs.py --- sosreport-4.4/sos/report/plugins/logs.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/logs.py 2023-05-26 22:32:49.000000000 +0530 @@ -7,10 +7,10 @@ # See the LICENSE file in the source distribution for further information. 
import glob -from sos.report.plugins import Plugin, IndependentPlugin +from sos.report.plugins import Plugin, PluginOpt, IndependentPlugin, CosPlugin -class Logs(Plugin, IndependentPlugin): +class LogsBase(Plugin): short_desc = 'System logs' @@ -61,9 +61,10 @@ # - systemd-journald service exists # otherwise fallback to collecting few well known logfiles directly journal = any([self.path_exists(self.path_join(p, "log/journal/")) - for p in ["/var", "/run"]]) + for p in ["/var", "/run"]]) if journal and self.is_service("systemd-journald"): - self.add_journal(since=since, tags='journal_full', priority=100) + self.add_journal(since=since, tags=['journal_full', 'journal_all'], + priority=100) self.add_journal(boot="this", since=since, tags='journal_since_boot') self.add_journal(boot="last", since=since, @@ -96,13 +97,39 @@ def postproc(self): self.do_path_regex_sub( r"/etc/rsyslog*", - r"ActionLibdbiPassword (.*)", - r"ActionLibdbiPassword [********]" - ) - self.do_path_regex_sub( - r"/etc/rsyslog*", - r"pwd=.*", - r"pwd=[******]" + r"(ActionLibdbiPassword |pwd=)(.*)", + r"\1[********]" ) + +class IndependentLogs(LogsBase, IndependentPlugin): + """ + This plugin will collect logs traditionally considered to be "system" logs, + meaning those such as /var/log/messages, rsyslog, and journals that are + not limited to unit-specific entries. + + Note that the --since option will apply to journal collections by this + plugin as well as the typical application to log files. Most users can + expect typical journal collections to include the "full" journal, as well + as journals limited to this boot and the previous boot. 
+ """ + + plugin_name = "logs" + profiles = ('system', 'hardware', 'storage') + + +class CosLogs(LogsBase, CosPlugin): + option_list = [ + PluginOpt(name="log_days", default=3, + desc="the number of days logs to collect") + ] + + def setup(self): + super(CosLogs, self).setup() + if self.get_option("all_logs"): + self.add_cmd_output("journalctl -o export") + else: + days = self.get_option("log_days", 3) + self.add_journal(since="-%ddays" % days) + # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/lstopo.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/lstopo.py --- sosreport-4.4/sos/report/plugins/lstopo.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/lstopo.py 2023-05-26 22:32:49.000000000 +0530 @@ -16,7 +16,11 @@ plugin_name = "lstopo" profiles = ("system", "hardware") - packages = ("hwloc-libs", "libhwloc5") + packages = ( + "hwloc-libs", + "libhwloc5", + "hwloc", + ) def setup(self): # binary depends on particular package, both require hwloc-libs one diff -Nru sosreport-4.4/sos/report/plugins/lustre.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/lustre.py --- sosreport-4.4/sos/report/plugins/lustre.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/lustre.py 2023-05-26 22:32:49.000000000 +0530 @@ -39,11 +39,13 @@ self.get_params("lnet", ["peers", "routes", "routers", "nis"]) self.get_params("ldlm-states", ["*.*.state"]) self.get_params("jobid", ["jobid_name", "jobid_var"]) + self.get_params("job-stats", ["*.*.job_stats"]) + self.get_params("exports", ["*.*.exports.*.*"]) # Client Specific self.add_cmd_output([ - "lfs df", - "lfs df -i" + "lfs df", + "lfs df -i" ]) # Server Specific @@ -52,6 +54,11 @@ self.get_params("quota", ["osd-*.*.quota_slave." 
+ "{info,limit_*,acct_*}"]) + self.add_copy_spec([ + "/sys/fs/ldiskfs", + "/proc/fs/ldiskfs", + ]) + # Grab emergency ring buffer dumps if self.get_option("all_logs"): self.add_copy_spec("/tmp/lustre-log.*") diff -Nru sosreport-4.4/sos/report/plugins/lvm2.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/lvm2.py --- sosreport-4.4/sos/report/plugins/lvm2.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/lvm2.py 2023-05-26 22:32:49.000000000 +0530 @@ -63,7 +63,7 @@ self.add_cmd_output( "vgdisplay -vv %s" % lvm_opts_foreign, - root_symlink="vgdisplay" + root_symlink="vgdisplay", tags="vgdisplay" ) pvs_cols = 'pv_mda_free,pv_mda_size,pv_mda_count,pv_mda_used_count' @@ -72,15 +72,20 @@ vgs_cols = vgs_cols + ',' + 'vg_tags,systemid' lvs_cols = ('lv_tags,devices,lv_kernel_read_ahead,lv_read_ahead,' 'stripes,stripesize') + self.add_cmd_output("lvs -a -o +%s %s" % (lvs_cols, lvm_opts_foreign), + tags="lvs_headings") + self.add_cmd_output( + "pvs -a -v -o +%s %s" % (pvs_cols, lvm_opts_foreign), + tags="pvs_headings") + self.add_cmd_output("vgs -v -o +%s %s" % (vgs_cols, lvm_opts_foreign), + tags="vgs_headings") self.add_cmd_output([ - "vgscan -vvv %s" % lvm_opts, "pvscan -v %s" % lvm_opts, - "pvs -a -v -o +%s %s" % (pvs_cols, lvm_opts_foreign), - "vgs -v -o +%s %s" % (vgs_cols, lvm_opts_foreign), - "lvs -a -o +%s %s" % (lvs_cols, lvm_opts_foreign) + "vgscan -vvv %s" % lvm_opts ]) self.add_copy_spec("/etc/lvm") + self.add_copy_spec("/run/lvm") if self.get_option('lvmdump'): self.do_lvmdump() diff -Nru sosreport-4.4/sos/report/plugins/lxd.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/lxd.py --- sosreport-4.4/sos/report/plugins/lxd.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/lxd.py 2023-05-26 22:32:49.000000000 +0530 @@ -44,6 +44,23 @@ snap_list = self.exec_cmd('snap list lxd') if snap_list["status"] == 0: self.add_cmd_output("lxd.buginfo", pred=lxd_pred) + + 
self.add_copy_spec([ + '/var/snap/lxd/common/config', + '/var/snap/lxd/common/global-conf', + '/var/snap/lxd/common/lxc/local.conf', + '/var/snap/lxd/common/lxd/logs/*/*.conf', + ]) + + if not self.get_option("all_logs"): + self.add_copy_spec([ + '/var/snap/lxd/common/lxd/logs/*.log', + '/var/snap/lxd/common/lxd/logs/*/*.log', + ]) + else: + self.add_copy_spec([ + '/var/snap/lxd/common/lxd/logs/**', + ]) else: self.add_copy_spec([ "/etc/default/lxd-bridge", diff -Nru sosreport-4.4/sos/report/plugins/md.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/md.py --- sosreport-4.4/sos/report/plugins/md.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/md.py 2023-05-26 22:32:49.000000000 +0530 @@ -23,7 +23,8 @@ for line in mdadm_members['output'].splitlines(): if 'linux_raid_member' in line: dev = line.split()[0] - self.add_cmd_output('mdadm -E /dev/%s' % dev) + self.add_cmd_output('mdadm -E /dev/%s' % dev, + tags="mdadm_E") self.add_copy_spec([ "/etc/mdadm.conf", diff -Nru sosreport-4.4/sos/report/plugins/memcached.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/memcached.py --- sosreport-4.4/sos/report/plugins/memcached.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/memcached.py 2023-05-26 22:32:49.000000000 +0530 @@ -26,7 +26,8 @@ def setup(self): super(RedHatMemcached, self).setup() - self.add_copy_spec("/etc/sysconfig/memcached") + self.add_copy_spec("/etc/sysconfig/memcached", + tags="sysconfig_memcached") class DebianMemcached(Memcached, DebianPlugin, UbuntuPlugin): diff -Nru sosreport-4.4/sos/report/plugins/microk8s.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/microk8s.py --- sosreport-4.4/sos/report/plugins/microk8s.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/microk8s.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,52 @@ +# Copyright (C) 2023 Canonical Ltd., +# David Negreira + +# This file is part of the sos 
project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos.report.plugins import Plugin, UbuntuPlugin + + +class Microk8s(Plugin, UbuntuPlugin): + """The Microk8s plugin collects the current status of the microk8s + snap on a Ubuntu machine. + + It will collect logs from journald related to the snap.microk8s + units as well as run microk8s commands to retrieve the configuration, + status, version and loaded plugins. + """ + + short_desc = 'The lightweight Kubernetes' + plugin_name = "microk8s" + profiles = ('container',) + + packages = ('microk8s',) + + microk8s_cmd = "microk8s" + + def setup(self): + self.add_journal(units="snap.microk8s.*") + + microk8s_subcmds = [ + 'addons repo list', + 'config', + 'ctr plugins ls', + 'ctr plugins ls -d', + 'status', + 'version' + ] + + self.add_cmd_output([ + f"microk8s {subcmd}" for subcmd in microk8s_subcmds + ]) + + def postproc(self): + rsub = r'(certificate-authority-data:|token:)\s.*' + self.do_cmd_output_sub("microk8s", rsub, r'\1 "**********"') + +# vim: set et ts=4 sw=4 diff -Nru sosreport-4.4/sos/report/plugins/microshift_ovn.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/microshift_ovn.py --- sosreport-4.4/sos/report/plugins/microshift_ovn.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/microshift_ovn.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,47 @@ +# Copyright (C) 2023 Pablo Acevedo + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. 
+# +# See the LICENSE file in the source distribution for further information. + +from sos.report.plugins import Plugin, RedHatPlugin + + +class MicroshiftOVN(Plugin, RedHatPlugin): + """This plugin is used to collect MicroShift 4.x OVN logs. + """ + short_desc = 'MicroShift OVN' + plugin_name = "microshift_ovn" + plugin_timeout = 300 + containers = ('ovnkube-node', 'ovnkube-master',) + packages = ('microshift-networking',) + profiles = ('microshift',) + + def setup(self): + self.add_copy_spec([ + '/etc/openvswitch/conf.db', + '/etc/openvswitch/default.conf', + '/etc/openvswitch/system-id.conf']) + + _ovs_cmd = 'ovs-appctl -t /var/run/ovn/' + _subcmds = [ + 'coverage/show', + 'memory/show', + 'ovsdb-server/sync-status' + ] + for file, db in [('ovnnb_db.ctl', 'OVN_Northbound'), + ('ovnsb_db.ctl', 'OVN_Southbound')]: + self.add_cmd_output( + [f"{_ovs_cmd}{file} {cmd}" for cmd in _subcmds], + timeout=MicroshiftOVN.plugin_timeout) + self.add_cmd_output( + f"{_ovs_cmd}{file} ovsdb-server/get-db-storage-status {db}", + timeout=MicroshiftOVN.plugin_timeout) + + self.add_cmd_output( + f'{_ovs_cmd}ovn-controller.*.ctl ct-zone-list', + timeout=MicroshiftOVN.plugin_timeout) diff -Nru sosreport-4.4/sos/report/plugins/microshift.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/microshift.py --- sosreport-4.4/sos/report/plugins/microshift.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/microshift.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,172 @@ +# Copyright 2023 Red Hat, Inc. Pablo Acevedo +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos.report.plugins import Plugin, RedHatPlugin, PluginOpt +import re + + +class Microshift(Plugin, RedHatPlugin): + """This is the plugin for MicroShift 4.X. Even though it shares some of + the OpenShift components, its IoT/Edge target makes the product nimble and + light, thus requiring different a approach when operating it. + + When enabled, this plugin will collect cluster information (such as + systemd service logs, configuration, versions, etc.)and also inspect API + resources in certain namespaces. The namespaces to scan are kube.* and + openshift.*. Other namespaces may be collected by making use of the + ``only-namespaces`` or ``add-namespaces`` options. + """ + + short_desc = 'Microshift' + plugin_name = 'microshift' + plugin_timeout = 900 + packages = ('microshift', 'microshift-selinux', 'microshift-networking') + services = (plugin_name,) + profiles = (plugin_name,) + localhost_kubeconfig = '/var/lib/microshift/resources/kubeadmin/kubeconfig' + + option_list = [ + PluginOpt('kubeconfig', default=localhost_kubeconfig, val_type=str, + desc='Path to a locally available kubeconfig file'), + PluginOpt('only-namespaces', default='', val_type=str, + desc='colon-delimited list of namespaces to collect from'), + PluginOpt('add-namespaces', default='', val_type=str, + desc=('colon-delimited list of namespaces to add to the ' + 'default collection list')) + ] + + def _setup_namespace_regexes(self): + """Combine a set of regexes for collection with any namespaces passed + to sos via the -k openshift.add-namespaces option. Note that this does + allow for end users to specify namespace regexes of their own. 
+ """ + if self.get_option('only-namespaces'): + return [n for n in self.get_option('only-namespaces').split(':')] + + collect_regexes = [ + r'^openshift\-.+$', + r'^kube\-.+$' + ] + + if self.get_option('add-namespaces'): + for nsp in self.get_option('add-namespaces').split(':'): + collect_regexes.append(r'^%s$' % nsp) + + return collect_regexes + + def _reduce_namespace_list(self, nsps): + """Reduce the namespace listing returned to just the ones we want to + collect from. By default, as requested by OCP support personnel, this + must include all 'openshift' prefixed namespaces + + :param nsps list: Namespace names from oc output + """ + + def _match_namespace(namespace): + """Match a particular namespace for inclusion (or not) in the + collection phases + + :param namespace str: The name of a namespace + """ + + for regex in self.collect_regexes: + if re.match(regex, namespace): + return True + return False + + self.collect_regexes = self._setup_namespace_regexes() + + return list(set([n for n in nsps if _match_namespace(n)])) + + def _get_namespaces(self): + res = self.exec_cmd( + 'oc get namespaces' + ' -o custom-columns=NAME:.metadata.name' + ' --no-headers' + ' --kubeconfig=%s' % self.get_option('kubeconfig')) + if res['status'] == 0: + return self._reduce_namespace_list(res['output'].split('\n')) + return [] + + def _get_cluster_resources(self): + """Get cluster-level (non-namespaced) resources to collect + """ + global_resources = [ + 'apiservices', + 'certificatesigningrequests', + 'clusterrolebindings', + 'clusterroles', + 'componentstatuses', + 'csidrivers', + 'csinodes', + 'customresourcedefinitions', + 'flowschemas', + 'ingressclasses', + 'logicalvolumes', + 'mutatingwebhookconfigurations', + 'nodes', + 'persistentvolumes', + 'priorityclasses', + 'prioritylevelconfigurations', + 'rangeallocations', + 'runtimeclasses', + 'securitycontextconstraints', + 'selfsubjectaccessreviews', + 'selfsubjectrulesreviews', + 'storageclasses', + 
'subjectaccessreviews', + 'tokenreviews', + 'validatingwebhookconfigurations', + 'volumeattachments' + ] + + _filtered_resources = [] + + for resource in global_resources: + res = self.exec_cmd( + "oc get --kubeconfig %s %s" % ( + self.get_option('kubeconfig'), resource), + timeout=Microshift.plugin_timeout) + if res['status'] == 0: + _filtered_resources.append(resource) + return _filtered_resources + + def setup(self): + """The setup() phase of this plugin will first gather system + information and then iterate through all default namespaces, and/or + those specified via the `add-namespaces` and `only-namespaces` plugin + options. Both of these options accept shell-style regexes. + + Output format for this function is based on `oc adm inspect` command, + which is used to retrieve all API resources from the cluster. + """ + self.add_forbidden_path('/var/lib/microshift') + self.add_cmd_output([ + 'microshift version', + 'microshift show-config -m effective' + ]) + + _cluster_resources_to_collect = ",".join( + self._get_cluster_resources()) + _namespaces_to_collect = " ".join( + ['ns/%s' % n for n in self._get_namespaces()]) + + if self.is_service_running(Microshift.plugin_name): + _subdir = self.get_cmd_output_path(make=False) + _kubeconfig = self.get_option('kubeconfig') + self.add_cmd_output( + 'oc adm inspect --kubeconfig %s --dest-dir %s %s' % ( + _kubeconfig, _subdir, _cluster_resources_to_collect), + suggest_filename='inspect_cluster_resources.log', + timeout=Microshift.plugin_timeout) + self.add_cmd_output( + 'oc adm inspect --kubeconfig %s --dest-dir %s %s' % ( + _kubeconfig, _subdir, _namespaces_to_collect), + suggest_filename='inspect_namespaces.log', + timeout=Microshift.plugin_timeout) diff -Nru sosreport-4.4/sos/report/plugins/mongodb.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/mongodb.py --- sosreport-4.4/sos/report/plugins/mongodb.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/mongodb.py 
2023-05-26 22:32:49.000000000 +0530 @@ -37,17 +37,13 @@ self.add_cmd_output("du -sh /var/lib/mongodb/") def postproc(self): - self.do_file_sub( - "/etc/mongodb.conf", - r"(mms-token\s*=\s*.*)", - r"mms-token = ********" - ) - - self.do_file_sub( - self.var_puppet_gen + "/etc/mongodb.conf", - r"(mms-token\s*=\s*.*)", - r"mms-token = ********" - ) + for f in ["/etc/mongodb.conf", + self.var_puppet_gen + "/etc/mongodb.conf"]: + self.do_file_sub( + f, + r"(mms-token)\s*=\s*(.*)", + r"\1 = ********" + ) class RedHatMongoDb(MongoDb, RedHatPlugin): diff -Nru sosreport-4.4/sos/report/plugins/monit.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/monit.py --- sosreport-4.4/sos/report/plugins/monit.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/monit.py 2023-05-26 22:32:49.000000000 +0530 @@ -43,29 +43,13 @@ for file in self.monit_conf: # Remove username:password from files self.do_file_sub(file, - r"allow (.*):(.*)", - r"allow ********:********" - ) - self.do_file_sub(file, - r"ALLOW (.*):(.*)", - r"ALLOW ********:********" + r"(allow) (.*):(.*)", + r"\1 ********:********" ) # Remove MAILSERVER username/password self.do_file_sub(file, - r"username (\w)+", - r"username ********" - ) - self.do_file_sub(file, - r"password (\w)+", - r"password ********" - ) - self.do_file_sub(file, - r"USERNAME (\w)+", - r"USERNAME ********" - ) - self.do_file_sub(file, - r"PASSWORD (\w)+", - r"PASSWORD ********" + r"(username|password) (\w)+", + r"\1 ********" ) # vim: et ts=4 sw=4 diff -Nru sosreport-4.4/sos/report/plugins/multipath.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/multipath.py --- sosreport-4.4/sos/report/plugins/multipath.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/multipath.py 2023-05-26 22:32:49.000000000 +0530 @@ -19,7 +19,7 @@ def setup(self): self.add_cmd_tags({ - 'multipath -v4 -ll': 'insights_multipath__v4__ll' + 'multipath -v4 -ll': 'multipath__v4__ll' }) 
self.add_copy_spec("/etc/multipath.conf", tags='multipath_conf') diff -Nru sosreport-4.4/sos/report/plugins/mysql.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/mysql.py --- sosreport-4.4/sos/report/plugins/mysql.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/mysql.py 2023-05-26 22:32:49.000000000 +0530 @@ -34,10 +34,7 @@ self.add_copy_spec([ self.mysql_cnf, - # Required for MariaDB under pacemaker (MariaDB-Galera) - "/var/log/mysqld.log", - "/var/log/mysql/mysqld.log", - "/var/log/mariadb/mariadb.log", + "/etc/mysqlrouter/", "/var/lib/mysql/grastate.dat", "/var/lib/mysql/gvwstate.dat" ]) @@ -45,7 +42,16 @@ if self.get_option("all_logs"): self.add_copy_spec([ "/var/log/mysql*", - "/var/log/mariadb*" + "/var/log/mariadb*", + "/var/log/mysqlrouter/*" + ]) + else: + self.add_copy_spec([ + # Required for MariaDB under pacemaker (MariaDB-Galera) + "/var/log/mysqld.log", + "/var/log/mysql/mysqld.log", + "/var/log/mysqlrouter/mysqlrouter.log", + "/var/log/mariadb/mariadb.log" ]) if self.get_option("dbdump"): diff -Nru sosreport-4.4/sos/report/plugins/networking.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/networking.py --- sosreport-4.4/sos/report/plugins/networking.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/networking.py 2023-05-26 22:32:49.000000000 +0530 @@ -82,7 +82,8 @@ self.add_cmd_output("ip -o addr", root_symlink="ip_addr", tags='ip_addr') - self.add_cmd_output("route -n", root_symlink="route", tags='route') + self.add_cmd_output("ip route show table all", root_symlink="ip_route", + tags=['ip_route', 'iproute_show_table_all']) self.add_cmd_output("plotnetcfg") self.add_cmd_output("netstat %s -neopa" % self.ns_wide, @@ -93,7 +94,6 @@ "netstat -s", "netstat %s -agn" % self.ns_wide, "networkctl status -a", - "ip route show table all", "ip -6 route show table all", "ip -d route show cache", "ip -d -6 route show cache", @@ -184,7 +184,7 @@ namespaces = 
self.get_network_namespaces( self.get_option("namespace_pattern"), self.get_option("namespaces")) - if (namespaces): + if namespaces: # 'ip netns exec iptables-save' must be guarded by nf_tables # kmod, if 'iptables -V' output contains 'nf_tables' # analogously for ip6tables @@ -198,48 +198,67 @@ if self.test_predicate(self, pred=SoSPredicate(self, cmd_outputs=co6)) else None) - for namespace in namespaces: - _devs = self.devices['namespaced_network'][namespace] - _subdir = "namespaces/%s" % namespace - ns_cmd_prefix = cmd_prefix + namespace + " " - self.add_cmd_output([ - ns_cmd_prefix + "ip -d address show", - ns_cmd_prefix + "ip route show table all", - ns_cmd_prefix + "ip -s -s neigh show", - ns_cmd_prefix + "ip -4 rule list", - ns_cmd_prefix + "ip -6 rule list", - ns_cmd_prefix + "ip vrf show", - ns_cmd_prefix + "netstat %s -neopa" % self.ns_wide, - ns_cmd_prefix + "netstat -s", - ns_cmd_prefix + "netstat %s -agn" % self.ns_wide, - ns_cmd_prefix + "nstat -zas", - ], priority=50, subdir=_subdir) - self.add_cmd_output([ns_cmd_prefix + "iptables-save"], - pred=iptables_with_nft, - subdir=_subdir, - priority=50) - self.add_cmd_output([ns_cmd_prefix + "ip6tables-save"], - pred=ip6tables_with_nft, - subdir=_subdir, - priority=50) - - ss_cmd = ns_cmd_prefix + "ss -peaonmi" - # --allow-system-changes is handled directly in predicate - # evaluation, so plugin code does not need to separately - # check for it - self.add_cmd_output(ss_cmd, pred=ss_pred, subdir=_subdir) - - # Collect ethtool commands only when ethtool_namespaces - # is set to true. - if self.get_option("ethtool_namespaces"): - # Devices that exist in a namespace use less ethtool - # parameters. Run this per namespace. 
- self.add_device_cmd([ - ns_cmd_prefix + "ethtool %(dev)s", - ns_cmd_prefix + "ethtool -i %(dev)s", - ns_cmd_prefix + "ethtool -k %(dev)s", - ns_cmd_prefix + "ethtool -S %(dev)s" - ], devices=_devs['ethernet'], priority=50, subdir=_subdir) + + for namespace in namespaces: + _devs = self.devices['namespaced_network'][namespace] + _subdir = "namespaces/%s" % namespace + ns_cmd_prefix = cmd_prefix + namespace + " " + self.add_cmd_output([ + ns_cmd_prefix + "ip -d address show", + ns_cmd_prefix + "ip route show table all", + ns_cmd_prefix + "ip -s -s neigh show", + ns_cmd_prefix + "ip -4 rule list", + ns_cmd_prefix + "ip -6 rule list", + ns_cmd_prefix + "ip vrf show", + ns_cmd_prefix + "sysctl -a", + ns_cmd_prefix + "netstat %s -neopa" % self.ns_wide, + ns_cmd_prefix + "netstat -s", + ns_cmd_prefix + "netstat %s -agn" % self.ns_wide, + ns_cmd_prefix + "nstat -zas", + ], priority=50, subdir=_subdir) + self.add_cmd_output([ns_cmd_prefix + "iptables-save"], + pred=iptables_with_nft, + subdir=_subdir, + priority=50) + self.add_cmd_output([ns_cmd_prefix + "ip6tables-save"], + pred=ip6tables_with_nft, + subdir=_subdir, + priority=50) + + ss_cmd = ns_cmd_prefix + "ss -peaonmi" + # --allow-system-changes is handled directly in predicate + # evaluation, so plugin code does not need to separately + # check for it + self.add_cmd_output(ss_cmd, pred=ss_pred, subdir=_subdir) + + # Collect ethtool commands only when ethtool_namespaces + # is set to true. + if self.get_option("ethtool_namespaces"): + # Devices that exist in a namespace use less ethtool + # parameters. Run this per namespace. 
+ self.add_device_cmd([ + ns_cmd_prefix + "ethtool %(dev)s", + ns_cmd_prefix + "ethtool -i %(dev)s", + ns_cmd_prefix + "ethtool -k %(dev)s", + ns_cmd_prefix + "ethtool -S %(dev)s" + ], devices=_devs['ethernet'], priority=50, subdir=_subdir) + + self.add_cmd_tags({ + "ethtool [^-].*": "ethtool", + "ethtool -S.*": "ethtool_S", + "ethtool -T.*": "ethtool_T", + "ethtool -a.*": "ethtool_a", + "ethtool -c.*": "ethtool_c", + "ethtool -g.*": "ethtool_g", + "ethtool -i.*": "ethtool_i", + "ethtool -k.*": "ethtool_k", + "ip -d address": "ip_addr", + "ip -s -s neigh show": "ip_neigh_show", + "ip -s -d link": "ip_s_link", + "netstat.*-neopa": "netstat", + "netstat.*-agn": "netstat_agn", + "netstat -s": "netstat_s" + }) class RedHatNetworking(Networking, RedHatPlugin): @@ -248,7 +267,7 @@ def setup(self): # Handle change from -T to -W in Red Hat netstat 2.0 and greater. try: - netstat_pkg = self.policy.package_manager.packages['net-tools'] + netstat_pkg = self.policy.package_manager.pkg_by_name('net-tools') # major version if int(netstat_pkg['version'][0]) < 2: self.ns_wide = "-T" diff -Nru sosreport-4.4/sos/report/plugins/networkmanager.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/networkmanager.py --- sosreport-4.4/sos/report/plugins/networkmanager.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/networkmanager.py 2023-05-26 22:32:49.000000000 +0530 @@ -53,6 +53,7 @@ self.add_cmd_output([ "nmcli general status", "nmcli con", + "nmcli -f all con", "nmcli con show --active", "nmcli dev"]) nmcli_con_details_cmd = nmcli_con_details_template % "show" @@ -99,6 +100,11 @@ devices='ethernet' ) + self.add_cmd_tags({ + "nmcli dev show": "nmcli_dev_show", + "nmcli dev show .*": "nmcli_dev_show_sos" + }) + def postproc(self): for root, dirs, files in os.walk( "/etc/NetworkManager/system-connections"): diff -Nru sosreport-4.4/sos/report/plugins/nfs.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/nfs.py --- 
sosreport-4.4/sos/report/plugins/nfs.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/nfs.py 2023-05-26 22:32:49.000000000 +0530 @@ -29,12 +29,14 @@ "/var/lib/nfs/etab", "/var/lib/nfs/xtab", "/var/lib/nfs/rmtab", + "/proc/fs/nfsd", ]) self.add_cmd_output([ "rpcinfo -p localhost", "nfsstat -o all", "exportfs -v", + "nfsdclnts", ]) diff -Nru sosreport-4.4/sos/report/plugins/nvme.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/nvme.py --- sosreport-4.4/sos/report/plugins/nvme.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/nvme.py 2023-05-26 22:32:49.000000000 +0530 @@ -10,12 +10,20 @@ class Nvme(Plugin, IndependentPlugin): + """Collects nvme device configuration information for each nvme device that + is installed on the system. - short_desc = 'Collect config and system information about NVMe devices' + Basic information is collected via the `smartctl` utility, however detailed + information will be collected via the `nvme` CLI if the `nvme-cli` package + is installed. 
+ """ + + short_desc = 'NVMe device information' plugin_name = "nvme" profiles = ('storage',) packages = ('nvme-cli',) + kernel_mods = ('nvme', 'nvme_core') def setup(self): self.add_copy_spec("/etc/nvme/*") diff -Nru sosreport-4.4/sos/report/plugins/openhpi.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openhpi.py --- sosreport-4.4/sos/report/plugins/openhpi.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openhpi.py 2023-05-26 22:32:49.000000000 +0530 @@ -24,7 +24,7 @@ def postproc(self): self.do_file_sub("/etc/openhpi/openhpi.conf", - r'(\s*[Pp]ass.*\s*=\s*).*', r'\1********') + r'(\s*pass.*\s*=\s*).*', r'\1********') # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/openstack_aodh.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_aodh.py --- sosreport-4.4/sos/report/plugins/openstack_aodh.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_aodh.py 2023-05-26 22:32:49.000000000 +0530 @@ -34,12 +34,12 @@ if self.get_option("all_logs"): self.add_copy_spec([ "/var/log/aodh/*", - "/var/log/httpd/aodh*", + "/var/log/{}*/aodh*".format(self.apachepkg), ]) else: self.add_copy_spec([ "/var/log/aodh/*.log", - "/var/log/httpd/aodh*.log", + "/var/log/{}*/aodh*.log".format(self.apachepkg), ]) vars_all = [p in os.environ for p in [ @@ -92,17 +92,21 @@ class DebianOpenStackAodh(OpenStackAodh, DebianPlugin, UbuntuPlugin): + apachepkg = "apache2" packages = ( 'aodh-api', + 'aodh-common', 'aodh-evaluator', 'aodh-notifier', 'aodh-listener', - 'python-aodhclient' + 'python-aodh', + 'python3-aodh', ) class RedHatOpenStackAodh(OpenStackAodh, RedHatPlugin): + apachepkg = "httpd" packages = ('openstack-selinux',) def setup(self): diff -Nru sosreport-4.4/sos/report/plugins/openstack_ceilometer.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_ceilometer.py --- sosreport-4.4/sos/report/plugins/openstack_ceilometer.py 2022-08-16 
01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_ceilometer.py 2023-05-26 22:32:49.000000000 +0530 @@ -37,6 +37,11 @@ self.var_puppet_gen + "/etc/ceilometer/*" ]) + self.add_file_tags({ + "/var/log/ceilometer/central.log": + "ceilometer_central_log" + }) + def apply_regex_sub(self, regexp, subst): self.do_path_regex_sub("/etc/ceilometer/*", regexp, subst) self.do_path_regex_sub( @@ -71,10 +76,11 @@ 'ceilometer-api', 'ceilometer-agent-central', 'ceilometer-agent-compute', + 'ceilometer-agent-notification', 'ceilometer-collector', 'ceilometer-common', 'python-ceilometer', - 'python-ceilometerclient' + 'python3-ceilometer', ) diff -Nru sosreport-4.4/sos/report/plugins/openstack_cinder.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_cinder.py --- sosreport-4.4/sos/report/plugins/openstack_cinder.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_cinder.py 2023-05-26 22:32:49.000000000 +0530 @@ -63,12 +63,12 @@ if self.get_option("all_logs"): self.add_copy_spec([ "/var/log/cinder/", - "/var/log/httpd/cinder*", + "/var/log/{}*/cinder*".format(self.apachepkg), ]) else: self.add_copy_spec([ "/var/log/cinder/*.log", - "/var/log/httpd/cinder*.log", + "/var/log/{}*/cinder*.log".format(self.apachepkg), ]) def apply_regex_sub(self, regexp, subst): @@ -107,6 +107,7 @@ class DebianCinder(OpenStackCinder, DebianPlugin, UbuntuPlugin): cinder = False + apachepkg = 'apache2' packages = ( 'cinder-api', 'cinder-backup', @@ -114,24 +115,14 @@ 'cinder-scheduler', 'cinder-volume', 'python-cinder', - 'python-cinderclient' + 'python3-cinder', ) - def setup(self): - super(DebianCinder, self).setup() - if self.get_option("all_logs"): - self.add_copy_spec([ - "/var/log/apache/cinder*", - ]) - else: - self.add_copy_spec([ - "/var/log/apache/cinder*.log", - ]) - class RedHatCinder(OpenStackCinder, RedHatPlugin): cinder = False + apachepkg = 'httpd' packages = ('openstack-selinux',) def 
setup(self): diff -Nru sosreport-4.4/sos/report/plugins/openstack_glance.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_glance.py --- sosreport-4.4/sos/report/plugins/openstack_glance.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_glance.py 2023-05-26 22:32:49.000000000 +0530 @@ -72,6 +72,13 @@ else: self.add_cmd_output("openstack image list --long") + self.add_file_tags({ + "/etc/glance/glance-api.conf": "glance_api_conf", + "/etc/glance/glance-cache.conf": "glance_cache_conf", + "/etc/glance/glance-registry.conf": "glance_registry_conf", + "/var/log/glance/api.log": "glance_api_log" + }) + def apply_regex_sub(self, regexp, subst): self.do_path_regex_sub("/etc/glance/*", regexp, subst) self.do_path_regex_sub( @@ -107,7 +114,8 @@ 'glance-client', 'glance-common', 'glance-registry', - 'python-glance' + 'python-glance', + 'python3-glance', ) service_name = 'glance-api.service' diff -Nru sosreport-4.4/sos/report/plugins/openstack_gnocchi.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_gnocchi.py --- sosreport-4.4/sos/report/plugins/openstack_gnocchi.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_gnocchi.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,111 @@ +# Copyright (C) 2016 Red Hat, Inc., Sachin Patil +# Copyright (C) 2017 Red Hat, Inc., Martin Schuppert + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +import os +from sos.report.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin + + +class Gnocchi(Plugin): + + short_desc = 'Gnocchi - Metric as a service' + plugin_name = "openstack_gnocchi" + + profiles = ('openstack', 'openstack_controller') + + def setup(self): + self.add_copy_spec([ + "/etc/gnocchi/*", + ]) + + if self.get_option("all_logs"): + self.add_copy_spec([ + "/var/log/gnocchi/*", + "/var/log/{}*/gnocchi*".format(self.apachepkg) + ]) + else: + self.add_copy_spec([ + "/var/log/gnocchi/*.log", + "/var/log/{}*/gnocchi*.log".format(self.apachepkg) + ]) + + vars_all = [p in os.environ for p in [ + 'OS_USERNAME', 'OS_PASSWORD']] + + vars_any = [p in os.environ for p in [ + 'OS_TENANT_NAME', 'OS_PROJECT_NAME']] + + if not (all(vars_all) and any(vars_any)): + self.soslog.warning("Not all environment variables set. Source " + "the environment file for the user intended " + "to connect to the OpenStack environment.") + else: + self.add_cmd_output([ + "gnocchi --version", + "gnocchi status", + "gnocchi capabilities list", + "gnocchi archive-policy list", + "gnocchi resource list", + "gnocchi resource-type list" + ]) + + def postproc(self): + self.do_file_sub( + "/etc/gnocchi/gnocchi.conf", + r"(ceph_secret|password|memcache_secret_key)\s?=(.*)", + r"\1=*****", + ) + + +class RedHatGnocchi(Gnocchi, RedHatPlugin): + + apachepkg = 'httpd' + var_puppet_gen = "/var/lib/config-data/puppet-generated/gnocchi" + + packages = ( + 'openstack-gnocchi-metricd', 'openstack-gnocchi-common', + 'openstack-gnocchi-statsd', 'openstack-gnocchi-api', + 'openstack-gnocchi-carbonara' + ) + + def setup(self): + super(RedHatGnocchi, self).setup() + self.add_copy_spec([ + self.var_puppet_gen + "/etc/gnocchi/*", + self.var_puppet_gen + "/etc/httpd/conf/*", + self.var_puppet_gen + "/etc/httpd/conf.d/*", + self.var_puppet_gen + "/etc/httpd/conf.modules.d/wsgi.conf", + self.var_puppet_gen + "/etc/my.cnf.d/tripleo.cnf" + ]) + + def postproc(self): + super(RedHatGnocchi, 
self).postproc() + self.do_file_sub( + self.var_puppet_gen + "/etc/gnocchi/" + "gnocchi.conf", + r"(ceph_secret|password|memcache_secret_key)\s?=(.*)", + r"\1=*****", + ) + + +class DebianGnocchi(Gnocchi, DebianPlugin, UbuntuPlugin): + + apachepkg = 'apache2' + + packages = ( + 'gnocchi-api', + 'gnocchi-metricd', + 'gnocchi-common', + 'gnocchi-statsd', + 'python-gnocchi', + 'python3-gnocchi', + ) + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/openstack_heat.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_heat.py --- sosreport-4.4/sos/report/plugins/openstack_heat.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_heat.py 2023-05-26 22:32:49.000000000 +0530 @@ -79,6 +79,10 @@ self.var_puppet_gen + "_api_cfn/var/spool/cron/heat", ]) + self.add_file_tags({ + "/var/log/heat/heat-engine.log": "heat_engine_log" + }) + def apply_regex_sub(self, regexp, subst): self.do_path_regex_sub( "/etc/heat/*", @@ -124,7 +128,7 @@ 'heat-common', 'heat-engine', 'python-heat', - 'python-heatclient' + 'python3-heat', ) service_name = 'heat-api.service' diff -Nru sosreport-4.4/sos/report/plugins/openstack_horizon.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_horizon.py --- sosreport-4.4/sos/report/plugins/openstack_horizon.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_horizon.py 2023-05-26 22:32:49.000000000 +0530 @@ -51,22 +51,11 @@ ] regexp = r"((?m)^\s*(%s)\s*=\s*)(.*)" % "|".join(protect_keys) - self.do_path_regex_sub( - r"/etc/openstack-dashboard/.*\.json", - regexp, r"\1*********" - ) - self.do_path_regex_sub( - var_puppet_gen + r"/etc/openstack-dashboard/.*\.json", - regexp, r"\1*********" - ) - self.do_path_regex_sub( - "/etc/openstack-dashboard/local_settings$", - regexp, r"\1*********" - ) - self.do_path_regex_sub( - var_puppet_gen + "/etc/openstack-dashboard/local_settings$", - regexp, r"\1*********" - ) + for 
regpath in [r"/etc/openstack-dashboard/.*\.json", + "/etc/openstack-dashboard/local_settings$"]: + self.do_path_regex_sub(regpath, regexp, r"\1*********") + self.do_path_regex_sub(var_puppet_gen + regpath, + regexp, r"\1*********") class DebianHorizon(OpenStackHorizon, DebianPlugin): @@ -86,8 +75,9 @@ packages = ( 'python-django-horizon', + 'python3-django-horizon', 'openstack-dashboard', - 'openstack-dashboard-ubuntu-theme' + 'openstack-dashboard-ubuntu-theme', ) def setup(self): diff -Nru sosreport-4.4/sos/report/plugins/openstack_instack.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_instack.py --- sosreport-4.4/sos/report/plugins/openstack_instack.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_instack.py 2023-05-26 22:32:49.000000000 +0530 @@ -44,15 +44,17 @@ if self.get_option("all_logs"): self.add_copy_spec([ - "/var/log/mistral/", "/var/log/zaqar/", ]) else: self.add_copy_spec([ - "/var/log/mistral/*.log", "/var/log/zaqar/*.log", ]) + self.add_file_tags({ + "/var/log/mistral/executor.log": "mistral_executor_log" + }) + vars_all = [p in os.environ for p in [ 'OS_USERNAME', 'OS_PASSWORD']] @@ -81,16 +83,17 @@ # capture all the possible stack uuids get_stacks = "openstack stack list" stacks = self.collect_cmd_output(get_stacks)['output'] - stack_ids = re.findall(r'(\s(\w+-\w+)+\s)', stacks) + stack_ids = re.findall(r'(\|\s(((\w+-){4})\w+)\s\|)', stacks) # get status of overcloud stack and resources - for sid in stack_ids: + for _sid in stack_ids: + sid = _sid[1] self.add_cmd_output([ - "openstack stack show %s" % sid[0], - "openstack stack resource list -n 10 %s" % sid[0] + "openstack stack show %s" % sid, + "openstack stack resource list -n 10 %s" % sid ]) # get details on failed deployments - cmd = "openstack stack resource list -f value -n 5 %s" % sid[0] + cmd = "openstack stack resource list -f value -n 5 %s" % sid deployments = self.exec_cmd(cmd) for deployment in 
deployments['output'].splitlines(): if 'FAILED' in deployment: @@ -110,27 +113,25 @@ "tripleo-ui-logs tripleo-ui.logs --file -") def postproc(self): + # do_file_sub is case insensitive, so protected_keys can be lowercase + # only protected_keys = [ - "UNDERCLOUD_TUSKAR_PASSWORD", "UNDERCLOUD_ADMIN_PASSWORD", - "UNDERCLOUD_CEILOMETER_METERING_SECRET", - "UNDERCLOUD_CEILOMETER_PASSWORD", - "UNDERCLOUD_CEILOMETER_SNMPD_PASSWORD", - "UNDERCLOUD_DB_PASSWORD", "UNDERCLOUD_GLANCE_PASSWORD", - "UNDERCLOUD_HEAT_PASSWORD", - "UNDERCLOUD_HEAT_STACK_DOMAIN_ADMIN_PASSWORD", - "UNDERCLOUD_HORIZON_SECRET_KEY", "UNDERCLOUD_IRONIC_PASSWORD", - "UNDERCLOUD_NEUTRON_PASSWORD", "UNDERCLOUD_NOVA_PASSWORD", - "UNDERCLOUD_RABBIT_PASSWORD", "UNDERCLOUD_SWIFT_PASSWORD", - "UNDERCLOUD_TUSKAR_PASSWORD", "OS_PASSWORD", - "undercloud_db_password", "undercloud_admin_password", - "undercloud_glance_password", "undercloud_heat_password", - "undercloud_neutron_password", "undercloud_nova_password", - "undercloud_ironic_password", "undercloud_tuskar_password", - "undercloud_ceilometer_password", + "os_password", + "undercloud_admin_password", "undercloud_ceilometer_metering_secret", + "undercloud_ceilometer_password", "undercloud_ceilometer_snmpd_password", - "undercloud_swift_password", "undercloud_rabbit_password", - "undercloud_heat_stack_domain_admin_password" + "undercloud_db_password", + "undercloud_glance_password", + "undercloud_heat_password", + "undercloud_heat_stack_domain_admin_password", + "undercloud_horizon_secret_key", + "undercloud_ironic_password", + "undercloud_neutron_password", + "undercloud_nova_password", + "undercloud_rabbit_password", + "undercloud_swift_password", + "undercloud_tuskar_password", ] regexp = r"((?m)(%s)=)(.*)" % "|".join(protected_keys) self.do_file_sub("/home/stack/.instack/install-undercloud.log", diff -Nru sosreport-4.4/sos/report/plugins/openstack_ironic.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_ironic.py --- 
sosreport-4.4/sos/report/plugins/openstack_ironic.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_ironic.py 2023-05-26 22:32:49.000000000 +0530 @@ -110,6 +110,10 @@ for path in ['/var/lib/ironic', '/httpboot', '/tftpboot']: self.add_cmd_output('ls -laRt %s' % path) + self.add_file_tags({ + ".*/etc/ironic/ironic.conf": "ironic_conf" + }) + vars_all = [p in os.environ for p in [ 'OS_USERNAME', 'OS_PASSWORD']] diff -Nru sosreport-4.4/sos/report/plugins/openstack_keystone.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_keystone.py --- sosreport-4.4/sos/report/plugins/openstack_keystone.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_keystone.py 2023-05-26 22:32:49.000000000 +0530 @@ -44,10 +44,12 @@ if self.get_option("all_logs"): self.add_copy_spec([ "/var/log/keystone/", + "/var/log/{}*/keystone*".format(self.apachepkg), ]) else: self.add_copy_spec([ "/var/log/keystone/*.log", + "/var/log/{}*/keystone*.log".format(self.apachepkg), ]) # collect domain config directory, if specified @@ -75,6 +77,11 @@ self.add_cmd_output("openstack endpoint list") self.add_cmd_output("openstack catalog list") + self.add_file_tags({ + ".*/etc/keystone/keystone.conf": "keystone_conf", + "/var/log/keystone/keystone.log": "keystone_log" + }) + def apply_regex_sub(self, regexp, subst): self.do_path_regex_sub("/etc/keystone/*", regexp, subst) self.do_path_regex_sub( @@ -101,29 +108,27 @@ ) # obfuscate LDAP plaintext passwords in domain config dir - self.do_path_regex_sub(self.domain_config_dir, - r"((?m)^\s*(%s)\s*=\s*)(.*)", r"\1********") + self.do_path_regex_sub( + self.domain_config_dir, + r"((?m)^\s*(%s)\s*=\s*)(.*)" % "|".join(protect_keys), + r"\1********" + ) class DebianKeystone(OpenStackKeystone, DebianPlugin, UbuntuPlugin): + apachepkg = 'apache2' packages = ( 'keystone', 'python-keystone', - 'python-keystoneclient' + 'python3-keystone', ) class 
RedHatKeystone(OpenStackKeystone, RedHatPlugin): + apachepkg = 'httpd' packages = ('openstack-selinux',) - def setup(self): - super(RedHatKeystone, self).setup() - if self.get_option("all_logs"): - self.add_copy_spec("/var/log/httpd/keystone*") - else: - self.add_copy_spec("/var/log/httpd/keystone*.log") - # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/openstack_manila.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_manila.py --- sosreport-4.4/sos/report/plugins/openstack_manila.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_manila.py 2023-05-26 22:32:49.000000000 +0530 @@ -46,6 +46,10 @@ "/var/log/manila/*.log", ]) + self.add_file_tags({ + ".*/etc/manila/manila.conf": "manila_conf" + }) + def apply_regex_sub(self, regexp, subst): self.do_path_regex_sub("/etc/manila/*", regexp, subst) self.do_path_regex_sub( @@ -78,9 +82,21 @@ 'manila-common', 'manila-api', 'manila-share', - 'manila-scheduler' + 'manila-scheduler', + 'python3-manila', ) + def setup(self): + super(DebianManila, self).setup() + if self.get_option("all_logs"): + self.add_copy_spec([ + "/var/log/apache2/manila*", + ]) + else: + self.add_copy_spec([ + "/var/log/apache2/manila*.log", + ]) + class RedHatManila(OpenStackManila, RedHatPlugin): @@ -91,5 +107,14 @@ super(RedHatManila, self).setup() self.add_copy_spec("/etc/sudoers.d/manila") + if self.get_option("all_logs"): + self.add_copy_spec([ + "/var/log/containers/manila/*" + ]) + else: + self.add_copy_spec([ + "/var/log/containers/manila/*.log" + ]) + # vim: et ts=4 sw=4 diff -Nru sosreport-4.4/sos/report/plugins/openstack_mistral.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_mistral.py --- sosreport-4.4/sos/report/plugins/openstack_mistral.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_mistral.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,48 @@ +# Copyright (C) 2022 Red Hat, Inc. 
+ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos.report.plugins import Plugin, RedHatPlugin + + +MISTRAL_DIRECTORIES = [ + '/var/log/mistral/', + '/var/lib/mistral/', + ] +MISTRAL_LOGS = [ + '/var/log/mistral/*.log', + '/var/lib/mistral/*/*.log', + ] + + +class OpenStackMistral(Plugin, RedHatPlugin): + '''Gather Mistral directories content, both data from /var/lib/mistral + and its log from /var/log/mistral if it exists (older OSP). + The data also embed logs for the ansible runs launched via the service, + meaning we'll be able to properly debug failures therein. The rest of the + data are the generated environment files, also really useful in order + to debug an issue at deploy or day-2 operations. + We filter out on the presence of any "mistral" related container on the + host - usually the Undercloud presents mistral_engine, mistral_executor + and mistral_api. 
+ ''' + + short_desc = 'OpenStack Mistral' + + plugin_name = "openstack_mistral" + profiles = ('openstack', 'openstack_undercloud') + containers = ('.*mistral_engine',) + + def setup(self): + if self.get_option('all_log'): + self.add_copy_spec(MISTRAL_DIRECTORIES) + else: + self.add_copy_spec(MISTRAL_LOGS) + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/openstack_neutron.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_neutron.py --- sosreport-4.4/sos/report/plugins/openstack_neutron.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_neutron.py 2023-05-26 22:32:49.000000000 +0530 @@ -63,6 +63,11 @@ self.add_cmd_output("openstack floating ip list") self.add_cmd_output("openstack security group list") + self.add_file_tags({ + ".*/etc/neutron/plugins/ml2/ml2_conf.ini": "neutronml2_conf", + "/var/log/neutron/server.log": "neutron_server_log" + }) + def apply_regex_sub(self, regexp, subst): self.do_path_regex_sub("/etc/neutron/*", regexp, subst) self.do_path_regex_sub( @@ -106,7 +111,7 @@ 'neutron-plugin-ryu-agent', 'neutron-server', 'python-neutron', - 'python-neutronclient' + 'python3-neutron', ) def check_enabled(self): diff -Nru sosreport-4.4/sos/report/plugins/openstack_novajoin.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_novajoin.py --- sosreport-4.4/sos/report/plugins/openstack_novajoin.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_novajoin.py 2023-05-26 22:32:49.000000000 +0530 @@ -25,12 +25,9 @@ self.add_copy_spec("/var/log/novajoin/*.log") def postproc(self): - regexp = (r"(?i)password=(.*)") + regexp = (r"(password|memcache_secret_key)=(.*)") self.do_file_sub("/etc/novajoin/join.conf", regexp, - r"password=*********") - regexp = (r"(?i)memcache_secret_key=(.*)") - self.do_file_sub("/etc/novajoin/join.conf", regexp, - r"password=*********") + r"\1=*********") class 
RedHatNovajoin(OpenStackNovajoin, RedHatPlugin): diff -Nru sosreport-4.4/sos/report/plugins/openstack_nova.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_nova.py --- sosreport-4.4/sos/report/plugins/openstack_nova.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_nova.py 2023-05-26 22:32:49.000000000 +0530 @@ -92,6 +92,7 @@ if self.get_option("all_logs"): self.add_copy_spec([ "/var/log/nova/", + "/var/log/{}*/nova*".format(self.apachepkg), ]) else: novadir = '/var/log/nova/' @@ -106,6 +107,9 @@ ] for novalog in novalogs: self.add_copy_spec(self.path_join(novadir, novalog)) + self.add_copy_spec([ + "/var/log/{}*/nova*.log".format(self.apachepkg), + ]) pp = ['', '_libvirt', '_metadata', '_placement'] sp = [ @@ -162,6 +166,7 @@ class DebianNova(OpenStackNova, DebianPlugin, UbuntuPlugin): + apachepkg = "apache2" nova = False packages = ( 'nova-api-ec2', @@ -183,8 +188,8 @@ 'nova-volume', 'novnc', 'python-nova', - 'python-novaclient', - 'python-novnc' + 'python-novnc', + 'python3-nova', ) service_name = "nova-api.service" @@ -198,6 +203,7 @@ class RedHatNova(OpenStackNova, RedHatPlugin): + apachepkg = "httpd" nova = False packages = ('openstack-selinux',) @@ -212,12 +218,10 @@ ]) if self.get_option("all_logs"): self.add_copy_spec([ - "/var/log/httpd/nova*", "/var/log/httpd/placement*", ]) else: self.add_copy_spec([ - "/var/log/httpd/nova*.log", "/var/log/httpd/placement*.log", ]) diff -Nru sosreport-4.4/sos/report/plugins/openstack_octavia.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_octavia.py --- sosreport-4.4/sos/report/plugins/openstack_octavia.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_octavia.py 2023-05-26 22:32:49.000000000 +0530 @@ -46,6 +46,10 @@ self.var_puppet_gen + "/etc/my.cnf.d/tripleo.cnf", ]) + self.add_file_tags({ + ".*/etc/octavia/octavia.conf": "octavia_conf" + }) + # don't collect certificates 
self.add_forbidden_path("/etc/octavia/certs") self.add_forbidden_path(self.var_config_data + "/etc/octavia/certs") @@ -124,7 +128,11 @@ class DebianOctavia(OpenStackOctavia, DebianPlugin, UbuntuPlugin): - packages = ('octavia-common', 'octavia-api', ) + packages = ( + 'octavia-common', + 'octavia-api', + 'python3-octavia', + ) def setup(self): super(DebianOctavia, self).setup() diff -Nru sosreport-4.4/sos/report/plugins/openstack_placement.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_placement.py --- sosreport-4.4/sos/report/plugins/openstack_placement.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_placement.py 2023-05-26 22:32:49.000000000 +0530 @@ -43,13 +43,15 @@ self.add_copy_spec([ "/var/log/placement/", "/var/log/containers/placement/", - "/var/log/containers/httpd/placement-api/" + "/var/log/containers/httpd/placement-api/", + "/var/log/{}*/placement*".format(self.apachepkg), ]) else: self.add_copy_spec([ "/var/log/placement/*.log", "/var/log/containers/placement/*.log", "/var/log/containers/httpd/placement-api/*log", + "/var/log/{}*/placement*.log".format(self.apachepkg), ]) self.add_copy_spec([ @@ -85,26 +87,17 @@ class DebianPlacement(OpenStackPlacement, DebianPlugin, UbuntuPlugin): - packages = ('placement',) - service_name = 'placement-api' - - def setup(self): - super(DebianPlacement, self).setup() - if self.get_option("all_logs"): - self.add_copy_spec("/var/log/apache2/placement*") - else: - self.add_copy_spec("/var/log/apache2/placement*.log") + apachepkg = "apache2" + packages = ( + 'placement-common', + 'placement-api', + 'python3-placement', + ) class RedHatPlacement(OpenStackPlacement, RedHatPlugin): + apachepkg = "httpd" packages = ('openstack-selinux',) - def setup(self): - super(RedHatPlacement, self).setup() - if self.get_option("all_logs"): - self.add_copy_spec("/var/log/httpd/placement*") - else: - self.add_copy_spec("/var/log/httpd/placement*.log") - # vim: set et 
ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/openstack_sahara.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_sahara.py --- sosreport-4.4/sos/report/plugins/openstack_sahara.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_sahara.py 2023-05-26 22:32:49.000000000 +0530 @@ -70,7 +70,7 @@ 'sahara-common', 'sahara-engine', 'python-sahara', - 'python-saharaclient', + 'python3-sahara', ) def setup(self): diff -Nru sosreport-4.4/sos/report/plugins/openstack_swift.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_swift.py --- sosreport-4.4/sos/report/plugins/openstack_swift.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_swift.py 2023-05-26 22:32:49.000000000 +0530 @@ -40,6 +40,11 @@ self.var_puppet_gen + "/memcached/etc/sysconfig/memcached" ]) + self.add_file_tags({ + "/etc/swift/swift.conf": "swift_conf", + "/var/log/swift/swift.log": "swift_log" + }) + def apply_regex_sub(self, regexp, subst): self.do_path_regex_sub(r"/etc/swift/.*\.conf.*", regexp, subst) self.do_path_regex_sub( @@ -77,7 +82,8 @@ 'swift-proxy', 'swauth', 'python-swift', - 'python-swauth' + 'python-swauth', + 'python3-swift', ) diff -Nru sosreport-4.4/sos/report/plugins/openstack_tripleo.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_tripleo.py --- sosreport-4.4/sos/report/plugins/openstack_tripleo.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_tripleo.py 2023-05-26 22:32:49.000000000 +0530 @@ -9,7 +9,6 @@ # See the LICENSE file in the source distribution for further information. from sos.report.plugins import Plugin, IndependentPlugin -import re class OpenStackTripleO(Plugin, IndependentPlugin): @@ -36,11 +35,9 @@ # Ensures we do not leak passwords from the tripleo-config and # hieradata locations. # Other locations don't have sensitive data. 
- secrets = r'(".*(key|password|pass|secret|database_connection))' \ - r'([":\s]+)(.*[^"])([",]+)' - rgxp = re.compile(secrets, re.IGNORECASE) - + regexp = r'(".*(key|password|pass|secret|database_connection))' \ + r'([":\s]+)(.*[^"])([",]+)' for path in self.tripleo_log_paths: - self.do_path_regex_sub(path, rgxp, r'\1\3*********\5') + self.do_path_regex_sub(path, regexp, r'\1\3*********\5') # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/openstack_trove.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_trove.py --- sosreport-4.4/sos/report/plugins/openstack_trove.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openstack_trove.py 2023-05-26 22:32:49.000000000 +0530 @@ -67,7 +67,8 @@ 'python-trove', 'trove-common', 'trove-api', - 'trove-taskmanager' + 'trove-taskmanager', + 'python3-trove', ) def setup(self): diff -Nru sosreport-4.4/sos/report/plugins/openvswitch.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openvswitch.py --- sosreport-4.4/sos/report/plugins/openvswitch.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/openvswitch.py 2023-05-26 22:32:49.000000000 +0530 @@ -85,10 +85,14 @@ if ovs_dbdir: self.add_copy_spec(self.path_join(ovs_dbdir, 'conf.db')) + self.add_file_tags({ + "/var/log/openvswitch/ovs-vswitchd.log": + "openvswitch_daemon_log", + "/var/log/openvswitch/ovsdb-server.log": + "openvswitch_server_log" + }) + self.add_cmd_output([ - # The '-t 5' adds an upper bound on how long to wait to connect - # to the Open vSwitch server, avoiding hangs when running sos. - "ovs-vsctl -t 5 show", # List the contents of important runtime directories "ls -laZ /run/openvswitch", "ls -laZ /dev/hugepages/", @@ -116,8 +120,6 @@ "ovs-appctl tnl/ports/show -v", # Capture upcall information "ovs-appctl upcall/show", - # Capture DPDK and other parameters - "ovs-vsctl -t 5 get Open_vSwitch . 
other_config", # Capture OVS list "ovs-vsctl -t 5 list Open_vSwitch", # Capture OVS interface list @@ -149,6 +151,13 @@ # Capture miniflow extract implementations "ovs-appctl dpif-netdev/miniflow-parser-get" ]) + # Capture DPDK and other parameters + self.add_cmd_output("ovs-vsctl -t 5 get Open_vSwitch . other_config", + tags="openvswitch_other_config") + # The '-t 5' adds an upper bound on how long to wait to connect + # to the Open vSwitch server, avoiding hangs when running sos. + self.add_cmd_output("ovs-vsctl -t 5 show", + tags="ovs_vsctl_show") # Gather systemd services logs self.add_journal(units="openvswitch") diff -Nru sosreport-4.4/sos/report/plugins/origin.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/origin.py --- sosreport-4.4/sos/report/plugins/origin.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/origin.py 2023-05-26 22:32:49.000000000 +0530 @@ -162,7 +162,7 @@ "atomic-openshift-master-api", "atomic-openshift-master-controllers"]) - # get logs from the infrastruture pods running in the default ns + # get logs from the infrastructure pods running in the default ns pods = self.exec_cmd("%s get pod -o name -n default" % oc_cmd_admin) for pod in pods['output'].splitlines(): @@ -201,14 +201,11 @@ r'|PASS|PWD|KEY|TOKEN|CRED|SECRET)[^,]*,' \ r'\s*"value":)[^}]*' self.do_cmd_output_sub('oc*json', env_regexp, r'\g "********"') - # LDAP identity provider + # LDAP identity provider (bindPassword) + # and github/google/OpenID identity providers (clientSecret) self.do_file_sub(self.master_cfg, - r"(bindPassword:\s*)(.*)", - r'\1"********"') - # github/google/OpenID identity providers - self.do_file_sub(self.master_cfg, - r"(clientSecret:\s*)(.*)", - r'\1"********"') + r"(bindPassword|clientSecret):\s*(.*)", + r'\1:"********"') class AtomicOpenShift(OpenShiftOrigin, RedHatPlugin): diff -Nru sosreport-4.4/sos/report/plugins/ostree.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ostree.py --- 
sosreport-4.4/sos/report/plugins/ostree.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ostree.py 2023-05-26 22:32:49.000000000 +0530 @@ -6,7 +6,7 @@ # # See the LICENSE file in the source distribution for further information. -from sos.report.plugins import Plugin, IndependentPlugin +from sos.report.plugins import Plugin, IndependentPlugin, PluginOpt class OSTree(Plugin, IndependentPlugin): @@ -17,6 +17,9 @@ profiles = ('system', 'sysmgmt', 'packagemanager') files = ('/ostree',) services = ('ostree-finalize-staged', 'ostree-boot-complete') + option_list = [ + PluginOpt('fsck', default=False, desc='collect ostree fsck') + ] def setup(self): self.add_copy_spec("/ostree/repo/config") @@ -25,7 +28,7 @@ "ostree admin config-diff", "ostree refs", ]) - if self.get_option("verify"): + if self.get_option("fsck"): self.add_cmd_output("ostree fsck") # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/ovirt.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ovirt.py --- sosreport-4.4/sos/report/plugins/ovirt.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ovirt.py 2023-05-26 22:32:49.000000000 +0530 @@ -165,6 +165,15 @@ "/var/lib/ovirt-engine-reports/jboss_runtime/config" ]) + self.add_file_tags({ + "/etc/ovirt-engine/engine.conf.d/.*": + "ovirt_engine_confd", + "/var/log/ovirt-engine/boot.log": + "ovirt_engine_boot_log", + "/var/log/ovirt-engine/console.log": + "ovirt_engine_console_log" + }) + # Copying host certs; extra copy the hidden .truststore file self.add_forbidden_path([ "/etc/pki/ovirt-engine/keys", @@ -179,16 +188,13 @@ """ Obfuscate sensitive keys. 
""" - self.do_file_sub( - "/etc/ovirt-engine/engine-config/engine-config.properties", - r"Password.type=(.*)", - r"Password.type=********" - ) - self.do_file_sub( - "/etc/rhevm/rhevm-config/rhevm-config.properties", - r"Password.type=(.*)", - r"Password.type=********" - ) + for f in ["/etc/ovirt-engine/engine-config/engine-config.properties", + "/etc/rhevm/rhevm-config/rhevm-config.properties"]: + self.do_file_sub( + f, + r"(Password.type)=(.*)", + r"\1=********" + ) engine_files = ( 'ovirt-engine.xml', @@ -200,14 +206,14 @@ for filename in engine_files: self.do_file_sub( "/var/tmp/ovirt-engine/config/%s" % filename, - r"(.*)", - r"********" + r"()(.*)()", + r"\1********\3" ) self.do_file_sub( "/etc/ovirt-engine/redhatsupportplugin.conf", - r"proxyPassword=(.*)", - r"proxyPassword=********" + r"(proxyPassword)=(.*)", + r"\1=********" ) passwd_files = [ @@ -219,13 +225,8 @@ conf_path = self.path_join("/etc/ovirt-engine", conf_file) self.do_file_sub( conf_path, - r"passwd=(.*)", - r"passwd=********" - ) - self.do_file_sub( - conf_path, - r"pg-pass=(.*)", - r"pg-pass=********" + r"(passwd|pg-pass)=(.*)", + r"\1=********" ) sensitive_keys = self.DEFAULT_SENSITIVE_KEYS @@ -234,12 +235,11 @@ if keys_opt and keys_opt is not True: sensitive_keys = keys_opt key_list = [x for x in sensitive_keys.split(':') if x] - for key in key_list: - self.do_path_regex_sub( - self.DB_PASS_FILES, - r'{key}=(.*)'.format(key=key), - r'{key}=********'.format(key=key) - ) + self.do_path_regex_sub( + self.DB_PASS_FILES, + r'(%s)=(.*)' % "|".join(key_list), + r'\1=********' + ) # Answer files contain passwords. 
# Replace all keys that have 'password' in them, instead of hard-coding @@ -252,10 +252,7 @@ ): self.do_path_regex_sub( r'/var/lib/ovirt-engine/setup/answers/.*', - re.compile( - r'(?P[^=]*{item}[^=]*)=.*'.format(item=item), - flags=re.IGNORECASE - ), + r'(?P[^=]*{item}[^=]*)=.*'.format(item=item), r'\g=********' ) diff -Nru sosreport-4.4/sos/report/plugins/ovn_central.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ovn_central.py --- sosreport-4.4/sos/report/plugins/ovn_central.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/ovn_central.py 2023-05-26 22:32:49.000000000 +0530 @@ -13,10 +13,10 @@ RedHatPlugin, DebianPlugin, UbuntuPlugin, - SoSPredicate, ) import json import os +import re class OVNCentral(Plugin): @@ -24,7 +24,24 @@ short_desc = 'OVN Northd' plugin_name = "ovn_central" profiles = ('network', 'virt') - containers = ('ovn-dbs-bundle.*',) + containers = ('ovn-dbs-bundle.*', 'ovn_cluster_north_db_server') + + def _find_sock(self, path, regex_name): + _sfile = os.path.join(path, regex_name) + if self._container_name: + res = self.exec_cmd("ls %s" % path, container=self._container_name) + if res['status'] != 0 or '\n' not in res['output']: + self._log_error( + "Could not retrieve ovn_controller socket path " + "from container %s" % self._container_name + ) + else: + pattern = re.compile(regex_name) + for filename in res['output'].split('\n'): + if pattern.match(filename): + return os.path.join(path, filename) + # File not found, return the regex full path + return _sfile def get_tables_from_schema(self, filename, skip=[]): if self._container_name: @@ -66,7 +83,13 @@ cmds.append('%s list %s' % (ovn_cmd, table)) def setup(self): - self._container_name = self.get_container_by_name(self.containers[0]) + # check if env is a clustered or non-clustered one + if self.container_exists(self.containers[1]): + self._container_name = self.get_container_by_name( + self.containers[1]) + else: + self._container_name = 
self.get_container_by_name( + self.containers[0]) ovs_rundir = os.environ.get('OVS_RUNDIR') for pidfile in ['ovnnb_db.pid', 'ovnsb_db.pid', 'ovn-northd.pid']: @@ -84,6 +107,12 @@ else: self.add_copy_spec("/var/log/ovn/*.log") + ovn_controller_sock_path = self._find_sock( + self.ovn_sock_path, self.ovn_controller_sock_regex) + + northd_sock_path = self._find_sock(self.ovn_sock_path, + self.ovn_northd_sock_regex) + # ovsdb nb/sb cluster status commands self.add_cmd_output( [ @@ -91,52 +120,49 @@ self.ovn_nbdb_sock_path), 'ovs-appctl -t {} cluster/status OVN_Southbound'.format( self.ovn_sbdb_sock_path), - 'ovn-appctl -t ovn-northd status' + 'ovn-appctl -t {} status'.format(northd_sock_path), + 'ovn-appctl -t {} debug/chassis-features-list'.format( + northd_sock_path), + 'ovn-appctl -t {} connection-status'.format( + ovn_controller_sock_path), ], foreground=True, container=self._container_name, timeout=30 ) # Some user-friendly versions of DB output nbctl_cmds = [ - 'ovn-nbctl show', - 'ovn-nbctl get-ssl', - 'ovn-nbctl get-connection', - 'ovn-nbctl list loadbalancer', - 'ovn-nbctl list Load_Balancer', - 'ovn-nbctl list ACL', - 'ovn-nbctl list Logical_Switch_Port', + 'ovn-nbctl --no-leader-only show', + 'ovn-nbctl --no-leader-only get-ssl', + 'ovn-nbctl --no-leader-only get-connection', ] sbctl_cmds = [ - 'ovn-sbctl show', - 'ovn-sbctl lflow-list', - 'ovn-sbctl get-ssl', - 'ovn-sbctl get-connection', + 'ovn-sbctl --no-leader-only show', + 'ovn-sbctl --no-leader-only lflow-list', + 'ovn-sbctl --no-leader-only get-ssl', + 'ovn-sbctl --no-leader-only get-connection', ] # backward compatibility for path in ['/usr/share/openvswitch', '/usr/share/ovn']: nb_tables = self.get_tables_from_schema(self.path_join( path, 'ovn-nb.ovsschema')) - self.add_database_output(nb_tables, nbctl_cmds, 'ovn-nbctl') + self.add_database_output(nb_tables, nbctl_cmds, + 'ovn-nbctl --no-leader-only') cmds = nbctl_cmds - # Can only run sbdb commands if we are the leader - co = {'cmd': "ovs-appctl -t 
{} cluster/status OVN_Southbound". - format(self.ovn_sbdb_sock_path), - "output": "Leader: self"} - if self.test_predicate(self, pred=SoSPredicate(self, cmd_outputs=co)): - # backward compatibility - for path in ['/usr/share/openvswitch', '/usr/share/ovn']: - sb_tables = self.get_tables_from_schema(self.path_join( - path, 'ovn-sb.ovsschema'), ['Logical_Flow']) - self.add_database_output(sb_tables, sbctl_cmds, 'ovn-sbctl') - cmds += sbctl_cmds + for path in ['/usr/share/openvswitch', '/usr/share/ovn']: + sb_tables = self.get_tables_from_schema(self.path_join( + path, 'ovn-sb.ovsschema'), ['Logical_Flow']) + self.add_database_output(sb_tables, sbctl_cmds, + 'ovn-sbctl --no-leader-only') + cmds += sbctl_cmds # If OVN is containerized, we need to run the above commands inside - # the container. - + # the container. Removing duplicates (in case there are) to avoid + # failing on collecting output to file on container running commands + cmds = list(set(cmds)) self.add_cmd_output( cmds, foreground=True, container=self._container_name ) @@ -144,14 +170,20 @@ self.add_copy_spec("/etc/sysconfig/ovn-northd") ovs_dbdir = os.environ.get('OVS_DBDIR') - for dbfile in ['ovnnb_db.db', 'ovnsb_db.db']: - self.add_copy_spec([ - self.path_join('/var/lib/openvswitch/ovn', dbfile), - self.path_join('/usr/local/etc/openvswitch', dbfile), - self.path_join('/etc/openvswitch', dbfile), - self.path_join('/var/lib/openvswitch', dbfile), - self.path_join('/var/lib/ovn/etc', dbfile) - ]) + for dbfile in ["ovnnb_db.db", "ovnsb_db.db"]: + for path in [ + "/var/lib/openvswitch/ovn", + "/usr/local/etc/openvswitch", + "/etc/openvswitch", + "/var/lib/openvswitch", + "/var/lib/ovn/etc", + "/var/lib/ovn", + ]: + dbfilepath = self.path_join(path, dbfile) + if os.path.exists(dbfilepath): + self.add_copy_spec(dbfilepath) + self.add_cmd_output( + "ls -lan %s" % dbfilepath, foreground=True) if ovs_dbdir: self.add_copy_spec(self.path_join(ovs_dbdir, dbfile)) @@ -163,6 +195,9 @@ packages = 
('openvswitch-ovn-central', 'ovn.*-central', ) ovn_nbdb_sock_path = '/var/run/openvswitch/ovnnb_db.ctl' ovn_sbdb_sock_path = '/var/run/openvswitch/ovnsb_db.ctl' + ovn_sock_path = '/var/run/openvswitch' + ovn_controller_sock_regex = 'ovn-controller.*.ctl' + ovn_northd_sock_regex = 'ovn-northd.*.ctl' class DebianOVNCentral(OVNCentral, DebianPlugin, UbuntuPlugin): @@ -170,3 +205,6 @@ packages = ('ovn-central', ) ovn_nbdb_sock_path = '/var/run/ovn/ovnnb_db.ctl' ovn_sbdb_sock_path = '/var/run/ovn/ovnsb_db.ctl' + ovn_sock_path = '/var/run/ovn' + ovn_controller_sock_regex = 'ovn-controller.*.ctl' + ovn_northd_sock_regex = 'ovn-northd.*.ctl' diff -Nru sosreport-4.4/sos/report/plugins/pacemaker.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/pacemaker.py --- sosreport-4.4/sos/report/plugins/pacemaker.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/pacemaker.py 2023-05-26 22:32:49.000000000 +0530 @@ -44,14 +44,14 @@ def setup_pcs(self): self.add_copy_spec("/var/log/pcsd/pcsd.log") self.add_cmd_output([ - "pcs config", - "pcs status --full", "pcs stonith sbd status --full", "pcs stonith sbd watchdog list", "pcs stonith history show", - "pcs quorum status", "pcs property list --all" ]) + self.add_cmd_output("pcs config", tags="pcs_config") + self.add_cmd_output("pcs quorum status", tags="pcs_quorum_status") + self.add_cmd_output("pcs status --full", tags="pcs_status") def postproc_crm_shell(self): self.do_cmd_output_sub( diff -Nru sosreport-4.4/sos/report/plugins/pci.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/pci.py --- sosreport-4.4/sos/report/plugins/pci.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/pci.py 2023-05-26 22:32:49.000000000 +0530 @@ -34,7 +34,8 @@ ]) if self.check_for_bus_devices(): - self.add_cmd_output("lspci -nnvv", root_symlink="lspci") + self.add_cmd_output("lspci -nnvv", root_symlink="lspci", + tags="lspci") self.add_cmd_output("lspci -tv") # vim: set 
et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/pcp.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/pcp.py --- sosreport-4.4/sos/report/plugins/pcp.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/pcp.py 2023-05-26 22:32:49.000000000 +0530 @@ -93,7 +93,7 @@ var_conf_dir ]) - # We explicitely avoid /var/lib/pcp/config/{pmchart,pmlogconf,pmieconf, + # We explicitly avoid /var/lib/pcp/config/{pmchart,pmlogconf,pmieconf, # pmlogrewrite} as in 99% of the cases they are just copies from the # rpms. It does not make up for a lot of size but it contains many # files diff -Nru sosreport-4.4/sos/report/plugins/podman.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/podman.py --- sosreport-4.4/sos/report/plugins/podman.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/podman.py 2023-05-26 22:32:49.000000000 +0530 @@ -57,7 +57,7 @@ self.add_cmd_tags({ 'podman images': 'podman_list_images', - 'podman ps.*': 'podman_list_containers' + 'podman ps': 'podman_list_containers' }) subcmds = [ diff -Nru sosreport-4.4/sos/report/plugins/postfix.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/postfix.py --- sosreport-4.4/sos/report/plugins/postfix.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/postfix.py 2023-05-26 22:32:49.000000000 +0530 @@ -8,6 +8,8 @@ from sos.report.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin +import re + class Postfix(Plugin): @@ -52,6 +54,42 @@ finally: return fp + def forbidden_password_files(self): + forbid_attributes = ( + "lmtp_sasl_password_maps", + "smtp_sasl_password_maps", + "postscreen_dnsbl_reply_map", + "smtp_sasl_auth_cache_name", + ) + fp = [] + prefix = 'hash:' + option_format = re.compile(r"^(.*)=(.*)") + try: + with open(self.path_join('/etc/postfix/main.cf'), 'r') as cffile: + for line in cffile.readlines(): + # ignore comment and check option format + line = re.sub('#.*', 
'', line) + option = option_format.match(line) + if option is None: + continue + + # sieving + attribute = option.group(1).strip() + if attribute in forbid_attributes: + filepath = option.group(2).strip() + # ignore no filepath + if len(filepath) == 0: + continue + # remove prefix + if filepath.startswith(prefix): + filepath = filepath[len(prefix):] + fp.append(filepath) + except Exception as e: + # error log + msg = f"Error parsing main.cf: {e.args[0]}" + self._log_error(msg) + return fp + def setup(self): self.add_copy_spec([ "/etc/postfix/", @@ -67,6 +105,7 @@ "/etc/postfix/ssl/", ]) self.add_forbidden_path(self.forbidden_ssl_keys_files()) + self.add_forbidden_path(self.forbidden_password_files()) class RedHatPostfix(Postfix, RedHatPlugin): diff -Nru sosreport-4.4/sos/report/plugins/powerpc.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/powerpc.py --- sosreport-4.4/sos/report/plugins/powerpc.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/powerpc.py 2023-05-26 22:32:49.000000000 +0530 @@ -39,7 +39,8 @@ "/proc/swaps", "/proc/version", "/dev/nvram", - "/var/lib/lsvpd/" + "/var/lib/lsvpd/", + "/etc/ct_node_id" ]) self.add_cmd_output([ "ppc64_cpu --info", @@ -52,7 +53,14 @@ "diag_encl -v", "lsvpd -D", "lsmcode -A", - "lscfg -v" + "lscfg -v", + "opal-elog-parse -s", + "opal-elog-parse -a", + "opal-elog-parse -l", + "lssrc -a", + "lsrsrc IBM.MCP", + "rmcdomainstatus -s ctrmc", + "rmcdomainstatus -s ctrmc -a ip" ]) if ispSeries: @@ -76,10 +84,26 @@ "serv_config -l", "bootlist -m both -r", "lparstat -i", + "lparnumascore", + "lparnumascore -c cpu -d 4", + "lparnumascore -c mem -d 3", "ctsnap -xrunrpttr -d %s" % (ctsnap_path), - "lsdevinfo" + "lsdevinfo", + "lsslot", + "amsstat" + ]) + + # Due to the lack of options in invscout for generating log files + # in locations other than /var/adm/invscout/, it is necessary to + # run invscout commands prior to collecting the log files. 
+ self.collect_cmd_output("invscout") + self.collect_cmd_output("invscout -v") + self.add_copy_spec(["/var/adm/invscout/*"]) + + self.add_service_status([ + "hcn-init", + "ctrmc" ]) - self.add_service_status("hcn-init") if isPowerNV: self.add_copy_spec([ diff -Nru sosreport-4.4/sos/report/plugins/processor.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/processor.py --- sosreport-4.4/sos/report/plugins/processor.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/processor.py 2023-05-26 22:32:49.000000000 +0530 @@ -28,7 +28,9 @@ "%s/vulnerabilities/.*" % cpupath: 'cpu_vulns', "%s/vulnerabilities/spectre_v2" % cpupath: 'cpu_vulns_spectre_v2', "%s/vulnerabilities/meltdown" % cpupath: 'cpu_vulns_meltdown', - "%s/cpu.*/online" % cpupath: 'cpu_cores' + "%s/cpu.*/online" % cpupath: 'cpu_cores', + "%s/cpu/cpu0/cpufreq/cpuinfo_max_freq" % cpupath: + 'cpuinfo_max_freq' }) self.add_copy_spec([ @@ -46,9 +48,9 @@ self.add_cmd_output([ "lscpu", "lscpu -ae", + "cpupower frequency-info", "cpupower info", "cpupower idle-info", - "cpupower frequency-info", "cpufreq-info", "cpuid", "cpuid -r", diff -Nru sosreport-4.4/sos/report/plugins/process.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/process.py --- sosreport-4.4/sos/report/plugins/process.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/process.py 2023-05-26 22:32:49.000000000 +0530 @@ -6,6 +6,7 @@ # # See the LICENSE file in the source distribution for further information. 
+import json import re from sos.report.plugins import Plugin, IndependentPlugin, PluginOpt @@ -59,13 +60,13 @@ self.add_cmd_output("ps auxwwwm", root_symlink="ps", tags=['ps_aux', 'ps_auxww', 'ps_auxwww', - 'ps_auxwwwm', 'insights_ps_auxcww'], + 'ps_auxwwwm', 'ps_auxcww'], priority=1) self.add_cmd_output("pstree -lp", root_symlink="pstree") if self.get_option("lsof"): self.add_cmd_output("lsof +M -n -l -c ''", root_symlink="lsof", - timeout=15, priority=50) + timeout=15, priority=50, tags="lsof") if self.get_option("lsof-threads"): self.add_cmd_output("lsof +M -n -l", timeout=15, priority=50) @@ -88,4 +89,28 @@ "pidstat -p ALL -rudvwsRU --human -h", "pidstat -tl" ]) + + def collect(self): + with self.collection_file('pids_to_packages.json') as pfile: + if not self.policy.package_manager.query_path_command: + pfile.write('Package manager not configured for path queries') + return + _ps = self.exec_cmd('ps --no-headers aex') + pidpkg = {} + paths = {} + if not _ps['status'] == 0: + pfile.write(f"Unable to get process list: {_ps['output']}") + return + for proc in _ps['output'].splitlines(): + proc = proc.strip().split() + pid = proc[0] + path = proc[4] + if not self.path_exists(path): + continue + if path not in paths: + paths[path] = self.policy.package_manager.pkg_by_path(path) + pidpkg[pid] = {'path': path, 'package': paths[path]} + + pfile.write(json.dumps(pidpkg, indent=4)) + # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/pulpcore.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/pulpcore.py --- sosreport-4.4/sos/report/plugins/pulpcore.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/pulpcore.py 2023-05-26 22:32:49.000000000 +0530 @@ -146,14 +146,16 @@ # TODO obfuscate from /etc/pulp/settings.py : # SECRET_KEY = "eKfeDkTnvss7p5WFqYdGPWxXfHnsbDBx" # 'PASSWORD': 'tGrag2DmtLqKLTWTQ6U68f6MAhbqZVQj', + # the PASSWORD can be also in an one-liner list, so detect its value + # in non-greedy manner 
till first ',' or '}' self.do_path_regex_sub( "/etc/pulp/settings.py", r"(SECRET_KEY\s*=\s*)(.*)", r"\1********") self.do_path_regex_sub( "/etc/pulp/settings.py", - r"(PASSWORD\S*\s*:\s*)(.*)", - r"\1********") + r"(PASSWORD\S*\s*:\s*)(.*?)(,|\})", + r"\1********\3") # apply the same for "dynaconf list" output that prints settings.py # in a pythonic format self.do_cmd_output_sub( diff -Nru sosreport-4.4/sos/report/plugins/pulp.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/pulp.py --- sosreport-4.4/sos/report/plugins/pulp.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/pulp.py 2023-05-26 22:32:49.000000000 +0530 @@ -131,10 +131,13 @@ self.add_cmd_output(prun, suggest_filename="pulp-running_tasks") self.add_cmd_output(csizes, suggest_filename="mongo-collection_sizes") self.add_cmd_output(dbstats, suggest_filename="mongo-db_stats") - self.add_cmd_output([ - "qpid-stat -%s --ssl-certificate=%s -b amqps://localhost:5671" % - (opt, self.messaging_cert_file) for opt in "quc" - ]) + + for opt in "quc": + self.add_cmd_output( + f"qpid-stat -{opt} --ssl-certificate=" + f"{self.messaging_cert_file} -b amqps://localhost:5671", + tags=f"qpid_stat_{opt}") + self.add_cmd_output( "sudo -u pulp PULP_SETTINGS='/etc/pulp/settings.py' " "DJANGO_SETTINGS_MODULE='pulpcore.app.settings' dynaconf list", diff -Nru sosreport-4.4/sos/report/plugins/puppet.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/puppet.py --- sosreport-4.4/sos/report/plugins/puppet.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/puppet.py 2023-05-26 22:32:49.000000000 +0530 @@ -38,10 +38,11 @@ "/var/lib/puppetlabs/puppet/ssl/ca/inventory.txt", "/var/lib/puppet/ssl/ca/inventory.txt", "/var/lib/puppet/ssl/certs/ca.pem", - "/etc/puppetlabs/puppet/ssl/certs/ca.pem", "/etc/puppetlabs/puppet/ssl/certs/{}.pem".format(_hostname), "/var/lib/puppet/ssl/certs/{}.pem".format(_hostname), ]) + 
self.add_copy_spec("/etc/puppetlabs/puppet/ssl/certs/ca.pem", + tags="puppet_ssl_cert_ca_pem") self.add_cmd_output([ 'facter', diff -Nru sosreport-4.4/sos/report/plugins/python.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/python.py --- sosreport-4.4/sos/report/plugins/python.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/python.py 2023-05-26 22:32:49.000000000 +0530 @@ -69,47 +69,44 @@ self.python_version = "/usr/libexec/platform-python -V" super(RedHatPython, self).setup() + def collect(self): if self.get_option('hashes'): - digests = { - 'digests': [] - } + with self.collection_file('digests.json') as hfile: + hfile.write(json.dumps(self.get_hashes(), indent=4)) - py_paths = [ - '/usr/lib', - '/usr/lib64', - '/usr/local/lib', - '/usr/local/lib64' - ] - - for py_path in py_paths: - for root, _, files in os.walk(self.path_join(py_path)): - for file_ in files: - filepath = self.path_join(root, file_) - if filepath.endswith('.py'): - try: - with open(filepath, 'rb') as f: - digest = hashlib.sha256() - chunk = 1024 - while True: - data = f.read(chunk) - if data: - digest.update(data) - else: - break - - digest = digest.hexdigest() - - digests['digests'].append({ - 'filepath': filepath, - 'sha256': digest - }) - except IOError: - self._log_error( - "Unable to read python file at %s" % - filepath - ) - - self.add_string_as_file(json.dumps(digests), 'digests.json', - plug_dir=True) + def get_hashes(self): + digests = { + 'digests': [] + } + py_paths = [ + '/usr/lib', + '/usr/lib64', + '/usr/local/lib', + '/usr/local/lib64' + ] + + for py_path in py_paths: + for root, _, files in os.walk(self.path_join(py_path)): + for _file in files: + if not _file.endswith('.py'): + continue + filepath = self.path_join(root, _file) + try: + with open(filepath, 'rb') as f: + digest = hashlib.sha256() + data = f.read(1024) + while data: + digest.update(data) + data = f.read(1024) + + digest = digest.hexdigest() + 
digests['digests'].append({ + 'filepath': filepath, + 'sha256': digest + }) + except IOError: + self._log_error("Unable to read python file at %s" + % filepath) + return digests # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/rabbitmq.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/rabbitmq.py --- sosreport-4.4/sos/report/plugins/rabbitmq.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/rabbitmq.py 2023-05-26 22:32:49.000000000 +0530 @@ -36,7 +36,8 @@ self.add_cmd_output( 'rabbitmqctl report', container=container, - foreground=True + foreground=True, + tags="rabbitmq_report" ) self.add_cmd_output( "rabbitmqctl eval 'rabbit_diagnostics:maybe_stuck().'", @@ -60,6 +61,11 @@ "/var/log/rabbitmq/*", ]) + self.add_file_tags({ + "/var/log/rabbitmq/rabbit@.*[^-sasl].log": "rabbitmq_logs", + "/var/log/rabbitmq/startup_err": "rabbitmq_startup_err" + }) + # Crash dump can be large in some situation but it is useful to # investigate why rabbitmq crashes. 
So capture the file without # sizelimit diff -Nru sosreport-4.4/sos/report/plugins/rear.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/rear.py --- sosreport-4.4/sos/report/plugins/rear.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/rear.py 2023-05-26 22:32:49.000000000 +0530 @@ -39,8 +39,8 @@ def postproc(self): self.do_path_regex_sub( '/etc/rear/*', - r'SSH_ROOT_PASSWORD=(.*)', - r'SSH_ROOT_PASSWORD=********' + r'(SSH_ROOT_PASSWORD)=(.*)', + r'\1=********' ) # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/redis.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/redis.py --- sosreport-4.4/sos/report/plugins/redis.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/redis.py 2023-05-26 22:32:49.000000000 +0530 @@ -59,13 +59,8 @@ "/etc/opt/rh/rh-redis5/"]: self.do_file_sub( path + "redis.conf", - r"(masterauth\s).*", - r"\1********" - ) - self.do_file_sub( - path + "redis.conf", - r"(requirepass\s).*", - r"requirepass = ********" + r"(masterauth|requirepass)\s.*", + r"\1 ********" ) # vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/release.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/release.py --- sosreport-4.4/sos/report/plugins/release.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/release.py 2023-05-26 22:32:49.000000000 +0530 @@ -6,10 +6,11 @@ # # See the LICENSE file in the source distribution for further information. 
-from sos.report.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin +from sos.report.plugins import Plugin, RedHatPlugin,\ + DebianPlugin, UbuntuPlugin, CosPlugin -class Release(Plugin, UbuntuPlugin): +class Release(Plugin, UbuntuPlugin, CosPlugin): short_desc = 'Linux release information' diff -Nru sosreport-4.4/sos/report/plugins/rhc.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/rhc.py --- sosreport-4.4/sos/report/plugins/rhc.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/rhc.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,49 @@ +# Copyright (C) 2023 Red Hat, Inc., Jose Castillo + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos.report.plugins import Plugin, RedHatPlugin + + +class Rhc(Plugin, RedHatPlugin): + + """ + RHC is a client tool and daemon that connects the system + to Red Hat hosted services enabling system and + subscription management. This plugin captures + configuration files and the output of 'rhc status'. 
+ """ + short_desc = 'Red Hat client for remote host configured services' + + plugin_name = "rhc" + packages = ("rhc", ) + + def setup(self): + self.add_copy_spec([ + "/etc/rhc/*", + ]) + + self.add_cmd_output([ + "rhc status", + ]) + + def postproc(self): + # hide workers/foreman_rh_cloud.toml FORWARDER_PASSWORD + # Example for scrubbing FORWARDER_PASSWORD + # + # "FORWARDER_PASSWORD=F0rW4rd3RPassW0rD" + # + # to + # + # "FORWARDER_PASSWORD= ******** + + self.do_path_regex_sub("/etc/rhc/workers/foreman_rh_cloud.toml", + r"(FORWARDER_PASSWORD\s*=\s*)(.+)(\"\,)", + r"\1********\3") +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/rhui.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/rhui.py --- sosreport-4.4/sos/report/plugins/rhui.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/rhui.py 2023-05-26 22:32:49.000000000 +0530 @@ -45,9 +45,13 @@ # hide rhui_manager_password value in (also rotated) answers file self.do_path_regex_sub( r"/root/\.rhui/answers.yaml.*", - r"(\s*rhui_manager_password\s*:)\s*(\S+)", + r"(\s*(rhui_manager|registry)_password\s*:)\s*(\S+)", r"\1********") - # obfuscate twoo cookies for login session + # hide registry_password value in rhui-tools.conf + self.do_path_regex_sub("/etc/rhui/rhui-tools.conf", + r"(registry_password:)\s*(.+)", + r"\1 ********") + # obfuscate two cookies for login session for cookie in ["csrftoken", "sessionid"]: self.do_path_regex_sub( r"/root/\.rhui/.*/cookies.txt", diff -Nru sosreport-4.4/sos/report/plugins/rpm.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/rpm.py --- sosreport-4.4/sos/report/plugins/rpm.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/rpm.py 2023-05-26 22:32:49.000000000 +0530 @@ -54,7 +54,7 @@ self.add_cmd_output("rpm -Va", root_symlink="rpm-Va", timeout=900, priority=100, tags=['rpm_va', 'rpm_V', 'rpm_v', - 'insights_rpm_V_packages']) + 'rpm_V_packages']) if 
self.get_option("rpmdb"): self.add_cmd_output("lsof +D /var/lib/rpm", diff -Nru sosreport-4.4/sos/report/plugins/saltmaster.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/saltmaster.py --- sosreport-4.4/sos/report/plugins/saltmaster.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/saltmaster.py 2023-05-26 22:32:49.000000000 +0530 @@ -30,7 +30,7 @@ def postproc(self): regexp = r'((?m)^\s+.*(pass|secret|(? {}".format(fp, os.readlink(fp))) - else: - expanded.append(fp) + out = f"{fp}" + links = 0 + # expand links like + # /usr/bin/jfr -> /etc/alternatives/jfr -> + # /usr/lib/jvm/java-11-openjdk-11.0.17.0.8-2.el9.x86_64/bin/jfr + # but stop at level 10 to prevent potential recursive links + while self.path_islink(fp) and links < 10: + fp = os.readlink(fp) + out += f" -> {fp}" + links += 1 + expanded.append(out + '\n') return expanded # Check command predicate to avoid costly processing if not self.test_predicate(cmd=True): return - paths = get_env_path_list() - all_fsystem = [] - all_frpm = set( - os.path.realpath(x) for x in self.policy.mangle_package_path( - self.policy.package_manager.all_files() - ) if any([x.startswith(p) for p in paths]) - ) - - for d in paths: - all_fsystem += all_files_system(d) - not_packaged = [x for x in all_fsystem if x not in all_frpm] - not_packaged_expanded = format_output(not_packaged) - self.add_string_as_file('\n'.join(not_packaged_expanded), 'unpackaged', - plug_dir=True) + with self.collection_file('unpackaged') as ufile: + paths = get_env_path_list() + all_fsystem = [] + all_frpm = set( + os.path.realpath(x) for x in self.policy.mangle_package_path( + self.policy.package_manager.all_files() + ) if any([x.startswith(p) for p in paths]) + ) + + for d in paths: + all_fsystem += all_files_system(d) + not_packaged = [x for [x, rp] in all_fsystem if rp not in all_frpm] + not_packaged_expanded = format_output(not_packaged) + + ufile.write(''.join(not_packaged_expanded)) # vim: set et ts=4 
sw=4 : diff -Nru sosreport-4.4/sos/report/plugins/vdsm.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/vdsm.py --- sosreport-4.4/sos/report/plugins/vdsm.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/vdsm.py 2023-05-26 22:32:49.000000000 +0530 @@ -85,6 +85,12 @@ '/var/lib/vdsm', ]) + self.add_file_tags({ + "/etc/vdsm/vdsm.conf": "vdsm_conf", + "/etc/vdsm/vdsm.id": "vdsm_id", + "/var/log/vdsm/import/import-*.log": "vdsm_import_log" + }) + qemu_pids = self.get_process_pids('qemu-kvm') if qemu_pids: files = ["cmdline", "status", "mountstats"] diff -Nru sosreport-4.4/sos/report/plugins/virsh.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/virsh.py --- sosreport-4.4/sos/report/plugins/virsh.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/virsh.py 2023-05-26 22:32:49.000000000 +0530 @@ -29,18 +29,25 @@ # get host information subcmds = [ - 'list --all', 'domcapabilities', 'capabilities', 'nodeinfo', - 'freecell', + 'freecell --all', 'node-memory-tune', - 'version' + 'version', + 'pool-capabilities', + 'nodecpumap', + 'maxvcpus kvm', + 'sysinfo', + 'nodedev-list --tree', ] for subcmd in subcmds: self.add_cmd_output('%s %s' % (cmd, subcmd), foreground=True) + self.add_cmd_output("%s list --all" % cmd, + tags="virsh_list_all", foreground=True) + # get network, pool and nwfilter elements for k in ['net', 'nwfilter', 'pool']: k_list = self.collect_cmd_output('%s %s-list' % (cmd, k), @@ -69,4 +76,26 @@ self.add_cmd_output('%s %s %s' % (cmd, x, d), foreground=True) + nodedev_output = self.exec_cmd( + '{0} nodedev-list'.format(cmd), foreground=True) + if nodedev_output['status'] == 0: + for n in nodedev_output['output'].splitlines(): + self.add_cmd_output( + '{0} nodedev-dumpxml {1}'.format(cmd, n), foreground=True) + + def postproc(self): + match_exp = r"(\s*passwd\s*=\s*\")([^\"]*)(\".*)" + virsh_path_exps = [ + r"/root/\.cache/virt-manager/.*\.log", + 
r"/root/\.virt-manager/.*\.log" + ] + for path_exp in virsh_path_exps: + # Scrub passwords in virt-manager logs + # Example of scrubbing: + # + # passwd="hackme" + # To: + # passwd="******" + # + self.do_path_regex_sub(path_exp, match_exp, r"\1******\3") # vim: et ts=4 sw=4 diff -Nru sosreport-4.4/sos/report/plugins/vmware.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/vmware.py --- sosreport-4.4/sos/report/plugins/vmware.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/vmware.py 2023-05-26 22:32:49.000000000 +0530 @@ -34,6 +34,10 @@ "/var/log/vmware-vmusr-root.log" ]) + self.add_file_tags({ + "/etc/vmware-tools/tools.conf": "vmware_tools_conf" + }) + self.add_cmd_output([ "vmware-checkvm", "vmware-toolbox-cmd device list", diff -Nru sosreport-4.4/sos/report/plugins/xfs.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/xfs.py --- sosreport-4.4/sos/report/plugins/xfs.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/xfs.py 2023-05-26 22:32:49.000000000 +0530 @@ -10,11 +10,19 @@ class Xfs(Plugin, IndependentPlugin): + """This plugin collects information on mounted XFS filessystems on the + local system. + + Users should expect `xfs_info` and `xfs_admin` collections by this plugin + for each XFS filesystem that is locally mounted. 
+ """ short_desc = 'XFS filesystem' plugin_name = 'xfs' profiles = ('storage',) + files = ('/sys/fs/xfs', '/proc/fs/xfs') + kernel_mods = ('xfs',) def setup(self): mounts = '/proc/mounts' @@ -22,7 +30,8 @@ for dev in zip(self.do_regex_find_all(ext_fs_regex, mounts)): for e in dev: parts = e.split(' ') - self.add_cmd_output("xfs_info %s" % (parts[1])) + self.add_cmd_output("xfs_info %s" % (parts[1]), + tags="xfs_info") self.add_cmd_output("xfs_admin -l -u %s" % (parts[0])) self.add_copy_spec([ diff -Nru sosreport-4.4/sos/report/plugins/zfs.py sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/zfs.py --- sosreport-4.4/sos/report/plugins/zfs.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/report/plugins/zfs.py 2023-05-26 22:32:49.000000000 +0530 @@ -28,6 +28,28 @@ "zpool status -vx" ]) + self.add_copy_spec([ + "/proc/spl/kmem/slab", + "/proc/spl/kstat/zfs/fm", + "/proc/spl/kstat/zfs/zil", + "/proc/spl/kstat/zfs/dbufs", + "/proc/spl/kstat/zfs/dbgmsg", + "/proc/spl/kstat/zfs/dmu_tx", + "/proc/spl/kstat/zfs/abdstats", + "/proc/spl/kstat/zfs/arcstats", + "/proc/spl/kstat/zfs/dbufstats", + "/proc/spl/kstat/zfs/dnodestats", + "/proc/spl/kstat/zfs/xuio_stats", + "/proc/spl/kstat/zfs/zfetchstats", + "/proc/spl/kstat/zfs/import_progress", + "/proc/spl/kstat/zfs/fletcher_4_bench", + "/proc/spl/kstat/zfs/vdev_cache_stats", + "/proc/spl/kstat/zfs/vdev_raidz_bench", + "/proc/spl/kstat/zfs/vdev_mirror_stats", + "/proc/spl/taskq", + "/proc/spl/taskq-all", + ]) + zpools = self.collect_cmd_output("zpool list -H -o name") if zpools['status'] == 0: zpools_list = zpools['output'].splitlines() diff -Nru sosreport-4.4/sos/utilities.py sosreport-4.5.4ubuntu0.20.04.1/sos/utilities.py --- sosreport-4.4/sos/utilities.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos/utilities.py 2023-05-26 22:32:49.000000000 +0530 @@ -19,11 +19,31 @@ import threading import time import io -import magic - from contextlib import closing from collections 
import deque +try: + from pkg_resources import parse_version as version_parse +except SyntaxError: + from packaging.version import parse as version_parse + +# try loading magic>=0.4.20 which implements detect_from_filename method +magic_mod = False +try: + import magic + magic.detect_from_filename(__file__) + magic_mod = True +except (ImportError, AttributeError): + log = logging.getLogger('sos') + from textwrap import fill + msg = ("""\ +WARNING: Failed to load 'magic' module version >= 0.4.20 which sos aims to \ +use for detecting binary files. A less effective method will be used. It is \ +recommended to install proper python3-magic package with the module. +""") + log.warning('\n' + fill(msg, 72, replace_whitespace=False) + '\n') + + TIMEOUT_DEFAULT = 300 @@ -75,17 +95,26 @@ :returns: True if binary, else False :rtype: ``bool`` """ - try: - _ftup = magic.detect_from_filename(fname) - _mimes = ['text/', 'inode/'] - return ( - _ftup.encoding == 'binary' and not - any(_ftup.mime_type.startswith(_mt) for _mt in _mimes) - ) - except Exception: - # if for some reason this check fails, don't blindly remove all files - # but instead rely on other checks done by the component - return False + if magic_mod: + try: + _ftup = magic.detect_from_filename(fname) + _mimes = ['text/', 'inode/'] + return ( + _ftup.encoding == 'binary' and not + any(_ftup.mime_type.startswith(_mt) for _mt in _mimes) + ) + except Exception: + pass + # if for some reason the above check fails or magic>=0.4.20 is not present, + # fail over to checking the very first byte of the file content + with open(fname, 'tr') as tfile: + try: + # when opened as above (tr), reading binary content will raise + # an exception + tfile.read(1) + return False + except UnicodeDecodeError: + return True def find(file_pattern, top_dir, max_depth=None, path_pattern=None): @@ -152,7 +181,7 @@ os.chdir(chdir) def _check_poller(proc): - if poller(): + if poller() or proc.poll() == 124: proc.terminate() raise 
SoSTimeoutError time.sleep(0.01) @@ -216,6 +245,7 @@ _output.close() # until we separate timeouts from the `timeout` command # handle per-cmd timeouts via Plugin status checks + reader.running = False return {'status': 124, 'output': reader.get_contents(), 'truncated': reader.is_full} if to_file: @@ -229,6 +259,8 @@ truncated = reader.is_full except OSError as e: + if to_file: + _output.close() if e.errno == errno.ENOENT: return {'status': 127, 'output': "", 'truncated': ''} else: @@ -379,6 +411,12 @@ return [d for d in _items if d not in _filt] +def parse_version(version): + """Parse the version string + """ + return version_parse(version) + + class FakeReader(): """Used as a replacement AsyncReader for when we are writing directly to disk, and allows us to keep more simplified flows for executing, diff -Nru sosreport-4.4/sos.spec sosreport-4.5.4ubuntu0.20.04.1/sos.spec --- sosreport-4.4/sos.spec 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/sos.spec 2023-05-26 22:32:49.000000000 +0530 @@ -1,22 +1,20 @@ -%{!?python_sitelib: %define python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print get_python_lib()")} - Summary: A set of tools to gather troubleshooting information from a system Name: sos -Version: 4.4 +Version: 4.5.4 Release: 1%{?dist} Group: Applications/System Source0: https://github.com/sosreport/sos/archive/%{name}-%{version}.tar.gz -License: GPLv2+ +License: GPL-2.0-or-later BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot BuildArch: noarch Url: https://github.com/sosreport/sos/ BuildRequires: python3-devel +BuildRequires: python3-setuptools BuildRequires: gettext Requires: python3-rpm -Requires: tar -Requires: xz Requires: python3-pexpect -Requires: python3-magic +Requires: python3-setuptools +Recommends: python3-magic Recommends: python3-pyyaml Obsoletes: sos-collector <= 1.9 @@ -40,13 +38,17 @@ install -d -m 755 ${RPM_BUILD_ROOT}/etc/sos/presets.d install -d -m 755 
${RPM_BUILD_ROOT}/etc/sos/groups.d install -d -m 755 ${RPM_BUILD_ROOT}/etc/sos/extras.d +install -d -m 755 ${RPM_BUILD_ROOT}/etc/tmpfiles.d/ install -m 644 %{name}.conf ${RPM_BUILD_ROOT}/etc/sos/%{name}.conf +install -m 644 tmpfiles/tmpfilesd-sos-rh.conf ${RPM_BUILD_ROOT}/etc/tmpfiles.d/%{name}.conf rm -rf ${RPM_BUILD_ROOT}/usr/config/ %find_lang %{name} || echo 0 -%files -f %{name}.lang +# internationalization is currently broken. Uncomment this line once fixed. +# %%files -f %%{name}.lang +%files %{_sbindir}/sos %{_sbindir}/sosreport %{_sbindir}/sos-collector @@ -54,6 +56,7 @@ %dir /etc/sos/presets.d %dir /etc/sos/extras.d %dir /etc/sos/groups.d +/etc/tmpfiles.d/%{name}.conf %{python3_sitelib}/* %{_mandir}/man1/* %{_mandir}/man5/* @@ -62,6 +65,22 @@ %config(noreplace) %{_sysconfdir}/sos/sos.conf %changelog +* Fri May 26 2023 Jake Hunsaker = 4.5.4 +- New upstream release + +* Fri Apr 28 2023 Jake Hunsaker = 4.5.3 +- New upstream release + +* Fri Mar 31 2023 Jake Hunsaker = 4.5.2 +- New upstream release +- Migrated to SPDX license + +* Wed Mar 01 2023 Jake Hunsaker = 4.5.1 +- New upstream release + +* Wed Feb 01 2023 Jake Hunsaker = 4.5.0 +- New upstream release + * Mon Aug 15 2022 Jake Hunsaker = 4.4 - New upstream release diff -Nru sosreport-4.4/tests/cleaner_tests/basic_function_tests/binary_test.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/basic_function_tests/binary_test.py --- sosreport-4.4/tests/cleaner_tests/basic_function_tests/binary_test.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/basic_function_tests/binary_test.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,21 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. 
+# +# See the LICENSE file in the source distribution for further information. + +from sos.report.plugins import Plugin, IndependentPlugin + + +class BinaryPlugin(Plugin, IndependentPlugin): + """Test plugin for testing binary removal with --clean + """ + + plugin_name = 'binary_test' + short_desc = 'test plugin for removing binaries with --clean' + + + def setup(self): + self.add_copy_spec('/var/log/binary_test.tar.xz') Binary files /tmp/5urYQka9UX/sosreport-4.4/tests/cleaner_tests/basic_function_tests/binary_test.tar.xz and /tmp/NJomlL8we3/sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/basic_function_tests/binary_test.tar.xz differ diff -Nru sosreport-4.4/tests/cleaner_tests/basic_function_tests/report_with_mask.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/basic_function_tests/report_with_mask.py --- sosreport-4.4/tests/cleaner_tests/basic_function_tests/report_with_mask.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/basic_function_tests/report_with_mask.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,119 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos_tests import StageOneReportTest, StageTwoReportTest + +import re + + +class ReportWithMask(StageOneReportTest): + """Testing around basic --clean/--mask usage and expectations + + :avocado: tags=stageone + """ + + sos_cmd = '--mask -o host,networking' + + def test_mask_was_run(self): + self.assertOutputContains('Beginning obfuscation') + self.assertOutputContains('Obfuscation completed') + + def test_private_map_was_generated(self): + self.assertOutputContains('A mapping of obfuscated elements is available at') + map_file = re.findall('/.*sosreport-.*-private_map', self.cmd_output.stdout)[-1] + self.assertFileExists(map_file) + + def test_tarball_named_obfuscated(self): + self.assertTrue('obfuscated' in self.archive) + + def test_archive_type_correct(self): + self.assertSosLogContains('Loaded .* as type sos report directory') + + def test_localhost_was_obfuscated(self): + self.assertFileHasContent('hostname', 'host0') + + def test_ip_address_was_obfuscated(self): + # Note: do not test for starting with the 100.* block here, as test + # machines may have /32 addresses. Instead, test that the actual + # IP address is not present + self.assertFileNotHasContent('ip_addr', self.sysinfo['pre']['networking']['ip_addr']) + + def test_loopback_was_not_obfuscated(self): + self.assertFileHasContent('ip_addr', '127.0.0.1/8') + + def test_mac_addrs_were_obfuscated(self): + content = self.get_file_content('sos_commands/networking/ip_maddr_show') + for line in content.splitlines(): + if line.strip().startswith('link'): + mac = line.strip().split()[1] + assert mac.startswith('53:4f:53'), "Found unobfuscated mac addr %s" % mac + + +class ReportWithCleanedKeywords(StageOneReportTest): + """Testing for obfuscated keywords provided by the user + + :avocado: tags=stageone + """ + + sos_cmd = '--clean -o filesys,kernel --keywords=fstab,Linux,tmp --no-update' + + # Will the 'tmp' be properly treated in path to working dir without raising an error? 
+ # To make this test effective, we assume the test runs on a system / with Policy + # returning '/var/tmp' as temp.dir + def test_keyword_in_tempdir_path(self): + self.assertOutputContains('Your sosreport has been generated and saved in:') + self.assertTrue('tmp/' in self.archive) + + # Ok, sort of cheesy here but this does actually test filename changes on + # a file common to all distros + def test_filename_obfuscated(self): + self.assertFileNotCollected('/etc/fstab') + self.assertFileGlobInArchive('/etc/obfuscatedword*') + + def test_keyword_obfuscated_in_file(self): + self.assertFileNotHasContent('sos_commands/kernel/uname_-a', 'Linux') + + +class DefaultRemoveBinaryFilesTest(StageTwoReportTest): + """Testing that binary files are removed by default + + :avocado: tags=stagetwo + """ + + files = [('binary_test.tar.xz', '/var/log/binary_test.tar.xz')] + install_plugins = ['binary_test'] + sos_cmd = '--clean -o binary_test,kernel,host' + + def test_binary_removed(self): + self.assertFileNotCollected('var/log/binary_test.tar.xz') + + def test_binaries_removed_reported(self): + self.assertOutputContains('\[removed .* unprocessable files\]') + + +class KeepBinaryFilesTest(StageTwoReportTest): + """Testing that --keep-binary-files will function as expected + + :avocado: tags=stagetwo + """ + + files = [('binary_test.tar.xz', '/var/log/binary_test.tar.xz')] + install_plugins = ['binary_test'] + sos_cmd = '--clean --keep-binary-files -o binary_test,kernel,host' + + def test_warning_message_shown(self): + self.assertOutputContains( + 'WARNING: binary files that potentially contain sensitive information ' + 'will NOT be removed from the final archive' + ) + + def test_binary_is_in_archive(self): + self.assertFileCollected('var/log/binary_test.tar.xz') + + def test_no_binaries_reported_removed(self): + self.assertOutputNotContains('\[removed .* unprocessable files\]') diff -Nru sosreport-4.4/tests/cleaner_tests/existing_archive.py 
sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/existing_archive.py --- sosreport-4.4/tests/cleaner_tests/existing_archive.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/existing_archive.py 2023-05-26 22:32:49.000000000 +0530 @@ -89,3 +89,22 @@ """Ensure that the 'testuser1' user created at install is obfuscated """ self.assertFileNotHasContent('var/log/anaconda/journal.log', 'testuser1') + +class ExistingArchiveCleanTmpTest(StageTwoReportTest): + """Continuation of above tests which requires cleaning var / tmp keywords + + Note that this copies heavily from the full_report_run test. + + :avocado: tags=stagetwo + """ + + sos_cmd = '-v --keywords var,tmp,avocado --disable-parsers ip,ipv6,mac,username \ + --no-update tests/test_data/%s.tar.xz' % ARCHIVE + sos_component = 'clean' + + def test_sys_tmp_not_obfuscated(self): + """ Ensure that keywords var, tmp and avocado remains in the final archive + path despite they are parts of the --tmp-dir + """ + self.assertTrue(self.archive.startswith(os.getenv('AVOCADO_TESTS_COMMON_TMPDIR'))) + diff -Nru sosreport-4.4/tests/cleaner_tests/full_report/full_report_run.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/full_report/full_report_run.py --- sosreport-4.4/tests/cleaner_tests/full_report/full_report_run.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/full_report/full_report_run.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,86 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +import json +import re + +from avocado.utils import process +from sos_tests import StageTwoReportTest + + +class FullCleanTest(StageTwoReportTest): + """Run an unrestricted report execution through sos clean, ensuring that + our obfuscation is reliable across arbitrary plugin sets and not just the + 'main' plugins that tend to collect data needing obfuscation + + :avocado: tags=stagetwo + """ + + sos_cmd = '-v --clean' + sos_timeout = 600 + # replace with an empty placeholder, make sure that this test case is not + # influenced by previous clean runs + files = [('default_mapping', '/etc/sos/cleaner/default_mapping')] + packages = { + 'rhel': ['python3-systemd'], + 'ubuntu': ['python3-systemd'] + } + + def pre_sos_setup(self): + # ensure that case-insensitive matching of FQDNs and shortnames work + from systemd import journal + from socket import gethostname + host = gethostname() + short = host.split('.')[0] + sosfd = journal.stream('sos-testing') + sosfd.write( + "This is a test line from sos clean testing. The hostname %s " + "should not appear, nor should %s in an obfuscated archive. The " + "shortnames of %s and %s should also not appear." 
+ % (host.lower(), host.upper(), short.lower(), short.upper()) + ) + + def test_private_map_was_generated(self): + self.assertOutputContains('A mapping of obfuscated elements is available at') + map_file = re.findall('/.*sosreport-.*-private_map', self.cmd_output.stdout)[-1] + self.assertFileExists(map_file) + + def test_tarball_named_obfuscated(self): + self.assertTrue('obfuscated' in self.archive) + + def test_archive_type_correct(self): + self.assertSosLogContains('Loaded .* as type sos report directory') + + def test_hostname_not_in_any_file(self): + host = self.sysinfo['pre']['networking']['hostname'] + short = host.split('.')[0] + # much faster to just use grep here + content = self.grep_for_content(host) + self.grep_for_content(short) + if not content: + assert True + else: + self.fail("Hostname appears in files: %s" + % "\n".join(f for f in content)) + + def test_no_empty_obfuscations(self): + # get the private map file name + map_file = re.findall('/.*sosreport-.*-private_map', self.cmd_output.stdout)[-1] + with open(map_file, 'r') as mf: + map_json = json.load(mf) + for mapping in map_json: + for key, val in map_json[mapping].items(): + assert key, "Empty key found in %s" % mapping + assert val, "%s mapping for '%s' empty" % (mapping, key) + + def test_ip_not_in_any_file(self): + ip = self.sysinfo['pre']['networking']['ip_addr'] + content = self.grep_for_content(ip) + if not content: + assert True + else: + self.fail("IP appears in files: %s" % "\n".join(f for f in content)) diff -Nru sosreport-4.4/tests/cleaner_tests/full_report_run.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/full_report_run.py --- sosreport-4.4/tests/cleaner_tests/full_report_run.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/full_report_run.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,86 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone 
wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. - -import json -import re - -from avocado.utils import process -from sos_tests import StageTwoReportTest - - -class FullCleanTest(StageTwoReportTest): - """Run an unrestricted report execution through sos clean, ensuring that - our obfuscation is reliable across arbitrary plugin sets and not just the - 'main' plugins that tend to collect data needing obfuscation - - :avocado: tags=stagetwo - """ - - sos_cmd = '-v --clean' - sos_timeout = 600 - # replace with an empty placeholder, make sure that this test case is not - # influenced by previous clean runs - files = ['/etc/sos/cleaner/default_mapping'] - packages = { - 'rhel': ['python3-systemd'], - 'ubuntu': ['python3-systemd'] - } - - def pre_sos_setup(self): - # ensure that case-insensitive matching of FQDNs and shortnames work - from systemd import journal - from socket import gethostname - host = gethostname() - short = host.split('.')[0] - sosfd = journal.stream('sos-testing') - sosfd.write( - "This is a test line from sos clean testing. The hostname %s " - "should not appear, nor should %s in an obfuscated archive. The " - "shortnames of %s and %s should also not appear." 
- % (host.lower(), host.upper(), short.lower(), short.upper()) - ) - - def test_private_map_was_generated(self): - self.assertOutputContains('A mapping of obfuscated elements is available at') - map_file = re.findall('/.*sosreport-.*-private_map', self.cmd_output.stdout)[-1] - self.assertFileExists(map_file) - - def test_tarball_named_obfuscated(self): - self.assertTrue('obfuscated' in self.archive) - - def test_archive_type_correct(self): - self.assertSosLogContains('Loaded .* as type sos report directory') - - def test_hostname_not_in_any_file(self): - host = self.sysinfo['pre']['networking']['hostname'] - short = host.split('.')[0] - # much faster to just use grep here - content = self.grep_for_content(host) + self.grep_for_content(short) - if not content: - assert True - else: - self.fail("Hostname appears in files: %s" - % "\n".join(f for f in content)) - - def test_no_empty_obfuscations(self): - # get the private map file name - map_file = re.findall('/.*sosreport-.*-private_map', self.cmd_output.stdout)[-1] - with open(map_file, 'r') as mf: - map_json = json.load(mf) - for mapping in map_json: - for key, val in map_json[mapping].items(): - assert key, "Empty key found in %s" % mapping - assert val, "%s mapping for '%s' empty" % (mapping, key) - - def test_ip_not_in_any_file(self): - ip = self.sysinfo['pre']['networking']['ip_addr'] - content = self.grep_for_content(ip) - if not content: - assert True - else: - self.fail("IP appears in files: %s" % "\n".join(f for f in content)) diff -Nru sosreport-4.4/tests/cleaner_tests/ipv6_test/ipv6.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/ipv6_test/ipv6.py --- sosreport-4.4/tests/cleaner_tests/ipv6_test/ipv6.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/ipv6_test/ipv6.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,23 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone 
wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos.report.plugins import Plugin, IndependentPlugin + + +class IPv6(Plugin, IndependentPlugin): + """Collect arbitrary file containing crafted ipv6 adresses to test ipv6 + obfuscation. + """ + + plugin_name = 'ipv6' + short_desc = 'fake plugin to test ipv6 obfuscation' + + def setup(self): + self.add_copy_spec([ + '/tmp/sos-test-ipv6.txt', + ]) diff -Nru sosreport-4.4/tests/cleaner_tests/ipv6_test/ipv6_test.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/ipv6_test/ipv6_test.py --- sosreport-4.4/tests/cleaner_tests/ipv6_test/ipv6_test.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/ipv6_test/ipv6_test.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,38 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos_tests import StageTwoReportTest + +MOCK_FILE = '/tmp/sos-test-ipv6.txt' + +class IPv6Test(StageTwoReportTest): + """Place artificial plugin collecting crafted text file with ipv6 adresses + to make sure ipv6 obfuscation works when calling 'sos clean' like a user + would. 
+ + :avocado: tags=stagetwo + """ + + install_plugins = ['ipv6'] + sos_cmd = '-v --clean -o ipv6' + sos_timeout = 600 + # replace default mapping to avoid being influenced by previous runs + # place mock file with crafted address used by mocked plugin + files = [ + ('default_mapping', '/etc/sos/cleaner/default_mapping'), + ('sos-test-ipv6.txt', MOCK_FILE) + ] + + def test_valid_ipv6(self): + self.assertFileCollected(MOCK_FILE) + self.assertFileHasContent(MOCK_FILE, 'GOOD_IP=') + self.assertFileNotHasContent(MOCK_FILE, 'GOOD_IP=3000:505f:505f:505f:505f:505f:505f:505f') + + def test_bad_ipv6(self): + self.assertFileHasContent(MOCK_FILE, 'BAD_IP=') + self.assertFileNotHasContent(MOCK_FILE, 'BAD_IP=505f:505f:505f:505f:505f:505f:505f:505f') diff -Nru sosreport-4.4/tests/cleaner_tests/ipv6_test/sos-test-ipv6.txt sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/ipv6_test/sos-test-ipv6.txt --- sosreport-4.4/tests/cleaner_tests/ipv6_test/sos-test-ipv6.txt 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/ipv6_test/sos-test-ipv6.txt 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,7 @@ +This is a test file for ipv6 address obfuscation. 
+ +This address should be obfuscated +GOOD_IP=3000:505f:505f:505f:505f:505f:505f:505f + +This address should not get sos stuck +BAD_IP=505f:505f:505f:505f:505f:505f:505f:505f diff -Nru sosreport-4.4/tests/cleaner_tests/report_disabled_parsers.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/report_disabled_parsers.py --- sosreport-4.4/tests/cleaner_tests/report_disabled_parsers.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/report_disabled_parsers.py 2023-05-26 22:32:49.000000000 +0530 @@ -33,7 +33,7 @@ # make sure that the other parsers remain functional def test_localhost_was_obfuscated(self): - self.assertFileHasContent('/etc/hostname', 'host0') + self.assertFileHasContent('hostname', 'host0') def test_mac_addrs_were_obfuscated(self): content = self.get_file_content('sos_commands/networking/ip_maddr_show') @@ -54,7 +54,7 @@ sos_component = 'clean' def test_localhost_not_obfuscated(self): - self.assertFileNotHasContent('/etc/hostname', self.sysinfo['pre']['networking']['hostname']) + self.assertFileNotHasContent('hostname', self.sysinfo['pre']['networking']['hostname']) self.assertFileNotHasContent('uname', self.sysinfo['pre']['networking']['hostname']) def test_local_ip_was_obfuscated(self): diff -Nru sosreport-4.4/tests/cleaner_tests/report_with_mask.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/report_with_mask.py --- sosreport-4.4/tests/cleaner_tests/report_with_mask.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/report_with_mask.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,112 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. 
- -from sos_tests import StageOneReportTest, StageTwoReportTest - -import re - - -class ReportWithMask(StageOneReportTest): - """Testing around basic --clean/--mask usage and expectations - - :avocado: tags=stageone - """ - - sos_cmd = '--mask -o host,networking' - - def test_mask_was_run(self): - self.assertOutputContains('Beginning obfuscation') - self.assertOutputContains('Obfuscation completed') - - def test_private_map_was_generated(self): - self.assertOutputContains('A mapping of obfuscated elements is available at') - map_file = re.findall('/.*sosreport-.*-private_map', self.cmd_output.stdout)[-1] - self.assertFileExists(map_file) - - def test_tarball_named_obfuscated(self): - self.assertTrue('obfuscated' in self.archive) - - def test_archive_type_correct(self): - self.assertSosLogContains('Loaded .* as type sos report directory') - - def test_localhost_was_obfuscated(self): - self.assertFileHasContent('/etc/hostname', 'host0') - - def test_ip_address_was_obfuscated(self): - # Note: do not test for starting with the 100.* block here, as test - # machines may have /32 addresses. 
Instead, test that the actual - # IP address is not present - self.assertFileNotHasContent('ip_addr', self.sysinfo['pre']['networking']['ip_addr']) - - def test_loopback_was_not_obfuscated(self): - self.assertFileHasContent('ip_addr', '127.0.0.1/8') - - def test_mac_addrs_were_obfuscated(self): - content = self.get_file_content('sos_commands/networking/ip_maddr_show') - for line in content.splitlines(): - if line.strip().startswith('link'): - mac = line.strip().split()[1] - assert mac.startswith('53:4f:53'), "Found unobfuscated mac addr %s" % mac - - -class ReportWithCleanedKeywords(StageOneReportTest): - """Testing for obfuscated keywords provided by the user - - :avocado: tags=stageone - """ - - sos_cmd = '--clean -o filesys,kernel --keywords=fstab,Linux' - - # Ok, sort of cheesy here but this does actually test filename changes on - # a file common to all distros - def test_filename_obfuscated(self): - self.assertFileNotCollected('/etc/fstab') - self.assertFileGlobInArchive('/etc/obfuscatedword*') - - def test_keyword_obfuscated_in_file(self): - self.assertFileNotHasContent('sos_commands/kernel/uname_-a', 'Linux') - - -class DefaultRemoveBinaryFilesTest(StageTwoReportTest): - """Testing that binary files are removed by default - - :avocado: tags=stagetwo - """ - - files = ['/var/log/binary_test.tar.xz'] - install_plugins = ['binary_test'] - sos_cmd = '--clean -o binary_test,kernel,host' - - def test_binary_removed(self): - self.assertFileNotCollected('var/log/binary_test.tar.xz') - - def test_binaries_removed_reported(self): - self.assertOutputContains('\[removed .* unprocessable files\]') - - -class KeepBinaryFilesTest(StageTwoReportTest): - """Testing that --keep-binary-files will function as expected - - :avocado: tags=stagetwo - """ - - files = ['/var/log/binary_test.tar.xz'] - install_plugins = ['binary_test'] - sos_cmd = '--clean --keep-binary-files -o binary_test,kernel,host' - - def test_warning_message_shown(self): - self.assertOutputContains( - 
'WARNING: binary files that potentially contain sensitive information ' - 'will NOT be removed from the final archive' - ) - - def test_binary_is_in_archive(self): - self.assertFileCollected('var/log/binary_test.tar.xz') - - def test_no_binaries_reported_removed(self): - self.assertOutputNotContains('\[removed .* unprocessable files\]') diff -Nru sosreport-4.4/tests/cleaner_tests/skip_versioning/skip_version_ip_parser.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/skip_versioning/skip_version_ip_parser.py --- sosreport-4.4/tests/cleaner_tests/skip_versioning/skip_version_ip_parser.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/skip_versioning/skip_version_ip_parser.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,36 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos_tests import StageTwoReportTest + +DO_SKIP = '/tmp/sos-test-version.txt' +NO_SKIP = '/tmp/sos-test-version-noskip' + +class SkipVersionIPParser(StageTwoReportTest): + """Ensures that we _skip_ files ending in 'version' (or 'version.txt') to + avoid incorrectly obfuscating version numbers. 
+ + :avocado: tags=stagetwo + """ + + files = [ + ('sos-test-version.txt', DO_SKIP), + ('sos-test-version-noskip', NO_SKIP) + ] + install_plugins = ['skip_versions'] + sos_cmd = '--clean -o skip_versions' + + def test_version_file_skipped(self): + self.assertFileCollected(DO_SKIP) + self.assertFileHasContent(DO_SKIP, '10.11.12.13') + self.assertFileHasContent(DO_SKIP, '6.0.0.1') + + def test_incorrect_version_file_not_skipped(self): + self.assertFileCollected(NO_SKIP) + self.assertFileNotHasContent(NO_SKIP, '10.11.12.13') + self.assertFileNotHasContent(NO_SKIP, '6.0.0.1') diff -Nru sosreport-4.4/tests/cleaner_tests/skip_versioning/skip_versions.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/skip_versioning/skip_versions.py --- sosreport-4.4/tests/cleaner_tests/skip_versioning/skip_versions.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/skip_versioning/skip_versions.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,24 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos.report.plugins import Plugin, IndependentPlugin + + +class SkipVersions(Plugin, IndependentPlugin): + """Collect the fake version files from the test suite, to ensure proper + skipping of version files + """ + + plugin_name = 'skip_versions' + short_desc = 'fake plugin to test skipping version files via the IP parser' + + def setup(self): + self.add_copy_spec([ + '/tmp/sos-test-version.txt', + '/tmp/sos-test-version-noskip' + ]) diff -Nru sosreport-4.4/tests/cleaner_tests/skip_versioning/sos-test-version-noskip sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/skip_versioning/sos-test-version-noskip --- sosreport-4.4/tests/cleaner_tests/skip_versioning/sos-test-version-noskip 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/skip_versioning/sos-test-version-noskip 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,6 @@ +This is a test file for skipping version files with the IP parser. + +All dotted-quad strings SHOULD be changed in this file. + +10.11.12.13 +6.0.0.1 diff -Nru sosreport-4.4/tests/cleaner_tests/skip_versioning/sos-test-version.txt sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/skip_versioning/sos-test-version.txt --- sosreport-4.4/tests/cleaner_tests/skip_versioning/sos-test-version.txt 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/skip_versioning/sos-test-version.txt 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,6 @@ +This is a test file for skipping version files with the IP parser. + +No dotted-quad strings should be changed in this file. 
+ +10.11.12.13 +6.0.0.1 diff -Nru sosreport-4.4/tests/cleaner_tests/skip_version_ip_parser.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/skip_version_ip_parser.py --- sosreport-4.4/tests/cleaner_tests/skip_version_ip_parser.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/skip_version_ip_parser.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,33 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. - -from sos_tests import StageTwoReportTest - -DO_SKIP = '/tmp/sos-test-version.txt' -NO_SKIP = '/tmp/sos-test-version-noskip' - -class SkipVersionIPParser(StageTwoReportTest): - """Ensures that we _skip_ files ending in 'version' (or 'version.txt') to - avoid incorrectly obfuscating version numbers. 
- - :avocado: tags=stagetwo - """ - - files = [DO_SKIP, NO_SKIP] - install_plugins = ['skip_versions'] - sos_cmd = '--clean -o skip_versions' - - def test_version_file_skipped(self): - self.assertFileCollected(DO_SKIP) - self.assertFileHasContent(DO_SKIP, '10.11.12.13') - self.assertFileHasContent(DO_SKIP, '6.0.0.1') - - def test_incorrect_version_file_not_skipped(self): - self.assertFileCollected(NO_SKIP) - self.assertFileNotHasContent(NO_SKIP, '10.11.12.13') - self.assertFileNotHasContent(NO_SKIP, '6.0.0.1') diff -Nru sosreport-4.4/tests/cleaner_tests/unicode_open/sos-test-unicode.txt sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/unicode_open/sos-test-unicode.txt --- sosreport-4.4/tests/cleaner_tests/unicode_open/sos-test-unicode.txt 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/unicode_open/sos-test-unicode.txt 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,5 @@ +This is a line with no unicode in it. +This line has some in it æßøĄ. +If opened normally, the last line may cause errors. +So this file is used to test opening with errors='replace'. +This line has the address 192.168.1.1 in it to ensure our cleaner tests are actually processing this file. diff -Nru sosreport-4.4/tests/cleaner_tests/unicode_open/unicode_in_file.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/unicode_open/unicode_in_file.py --- sosreport-4.4/tests/cleaner_tests/unicode_open/unicode_in_file.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/unicode_open/unicode_in_file.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,34 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos_tests import StageTwoReportTest + + +class UnicodeOpenTest(StageTwoReportTest): + """This test ensures that we can safely open files that have embedded + unicode in them, and that those files do not trigger an exception that + leaves them uncleaned. + + :avocado: tags=stagetwo + """ + + sos_cmd = '--clean -o unicode_test,networking,host' + files = [('sos-test-unicode.txt', '/tmp/sos-test-unicode.txt')] + install_plugins = ['unicode_test'] + + def test_file_was_collected(self): + self.assertFileCollected('/tmp/sos-test-unicode.txt') + + def test_file_was_opened(self): + # if this fails, then we hit an exception when opening the file + self.assertSosLogContains('Obfuscating tmp/sos-test-unicode.txt') + self.assertSosLogNotContains('.*Unable to parse.*') + + def test_obfuscation_complete(self): + # make sure that we didn't stop processing the file after the unicode + self.assertFileNotHasContent('tmp/sos-test-unicode.txt', '192.168.1.1') diff -Nru sosreport-4.4/tests/cleaner_tests/unicode_open/unicode_test.py sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/unicode_open/unicode_test.py --- sosreport-4.4/tests/cleaner_tests/unicode_open/unicode_test.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/cleaner_tests/unicode_open/unicode_test.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,20 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos.report.plugins import Plugin, IndependentPlugin + + +class UnicodeTest(Plugin, IndependentPlugin): + """Fake plugin to test the handling of a file with embedded unicode + """ + + plugin_name = 'unicode_test' + short_desc = 'Fake plugin to test unicode file handling' + + def setup(self): + self.add_copy_spec('/tmp/sos-test-unicode.txt') diff -Nru sosreport-4.4/tests/product_tests/foreman/foreman_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/product_tests/foreman/foreman_tests.py --- sosreport-4.4/tests/product_tests/foreman/foreman_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/product_tests/foreman/foreman_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -26,6 +26,7 @@ """ sos_cmd = '-v' + arch = ['x86_64'] def test_tfm_plugins_ran(self): self.assertPluginIncluded([ @@ -102,6 +103,7 @@ """ sos_cmd = '-v -k foreman.proxyfeatures=on' + arch = ['x86_64'] @redhat_only def test_proxyfeatures_collected(self): @@ -115,6 +117,7 @@ """ sos_cmd = '-v -o foreman_installer' + arch = ['x86_64'] def test_foreman_installer_etc_collected(self): self.assertFileCollected("/etc/foreman-installer/scenarios.d") @@ -127,6 +130,7 @@ """ sos_cmd = '-v -o foreman_proxy' + arch = ['x86_64'] def test_foreman_proxy_settings_collected(self): self.assertFileCollected("/etc/foreman-proxy/settings.yml") diff -Nru sosreport-4.4/tests/report_tests/basic_report_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/basic_report_tests.py --- sosreport-4.4/tests/report_tests/basic_report_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/basic_report_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -6,8 +6,9 @@ # # See the LICENSE file in the source distribution for further information. 
- -from sos_tests import StageOneReportTest +from avocado.core.exceptions import TestSkipError +from avocado.utils import process +from sos_tests import StageOneReportTest, redhat_only class NormalSoSReport(StageOneReportTest): @@ -33,6 +34,24 @@ def test_free_symlink_created(self): self.assertFileCollected('free') + def test_tag_summary_created(self): + self.assertTrue( + 'tag_summary' in self.manifest['components']['report'], + "No tag summary generated in report" + ) + self.assertTrue( + isinstance(self.manifest['components']['report']['tag_summary'], dict), + "Tag summary malformed" + ) + + @redhat_only + def test_version_matches_package(self): + if not self.params.get('TESTLOCAL') == 'true': + raise TestSkipError("Not testing local package installation") + _pkg_ver = process.run('rpm -q sos').stdout.decode().split('-')[1] + self.assertSosUILogContains(f"(version {_pkg_ver})") + self.assertEqual(self.manifest['version'], _pkg_ver) + class LogLevelTest(StageOneReportTest): """ diff -Nru sosreport-4.4/tests/report_tests/encryption_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/encryption_tests.py --- sosreport-4.4/tests/report_tests/encryption_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/encryption_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -42,7 +42,7 @@ sos_cmd = "-o host,networking --clean --encrypt-pass %s" % encrypt_pass def test_hostname_obfuscated(self): - self.assertFileHasContent('/etc/hostname', 'host0') + self.assertFileHasContent('hostname', 'host0') def test_tarball_named_obfuscated(self): self.assertTrue('obfuscated' in self.archive) diff -Nru sosreport-4.4/tests/report_tests/low_priority_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/low_priority_tests.py --- sosreport-4.4/tests/report_tests/low_priority_tests.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/low_priority_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 
+1,33 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from os.path import exists +from sos_tests import StageOneReportTest + + +class LowPrioTest(StageOneReportTest): + """ + Ensures that --low-priority properly sets our defined constraints on our + own process + + :avocado: tags=stageone + """ + + sos_cmd = '--low-priority -o kernel' + + def test_ionice_class_set(self): + _class = self.manifest['components']['report']['priority']['io_class'] + if exists('/usr/bin/ionice'): + self.assertSosLogContains('Set IO class to idle') + self.assertEqual(_class, 'idle') + else: + self.assertEqual(_class, 'unknown') + + def test_niceness_set(self): + self.assertSosLogContains('Set niceness of report to 19') + self.assertEqual(self.manifest['components']['report']['priority']['niceness'], 19) diff -Nru sosreport-4.4/tests/report_tests/options_tests/options_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/options_tests/options_tests.py --- sosreport-4.4/tests/report_tests/options_tests/options_tests.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/options_tests/options_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,45 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ + +from sos_tests import StageTwoReportTest + + +class OptionsFromConfigTest(StageTwoReportTest): + """Ensure that we handle options specified in sos.conf properly + + :avocado: tags=stagetwo + """ + + files = [('options_tests_sos.conf', '/etc/sos/sos.conf')] + sos_cmd = '-v ' + + def test_case_id_from_config(self): + self.assertTrue('8675309' in self.archive) + + def test_plugins_skipped_from_config(self): + self.assertPluginNotIncluded(['networking', 'logs']) + + def test_plugopts_logged_from_config(self): + self.assertSosLogContains( + "Set kernel plugin option to \(name=with-timer, desc='gather /proc/timer\* statistics', value=True, default=False\)" + ) + self.assertSosLogContains( + "Set kernel plugin option to \(name=trace, desc='gather /sys/kernel/debug/tracing/trace file', value=True, default=False\)" + ) + + def test_disabled_plugopts_not_loaded(self): + self.assertSosLogNotContains("Set networking plugin option to") + + def test_plugopts_actually_set(self): + self.assertFileCollected('sys/kernel/debug/tracing/trace') + + def test_effective_options_logged_correctly(self): + self.assertSosLogContains( + "effective options now: --batch --case-id 8675309 --plugopts kernel.with-timer=on,kernel.trace=yes --skip-plugins networking,logs" + ) diff -Nru sosreport-4.4/tests/report_tests/options_tests/options_tests_sos.conf sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/options_tests/options_tests_sos.conf --- sosreport-4.4/tests/report_tests/options_tests/options_tests_sos.conf 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/options_tests/options_tests_sos.conf 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,18 @@ +[global] +#verbose = 3 + +[report] +skip-plugins = networking,logs +case-id = 8675309 + +[collect] +#primary = myhost.example.com + +[clean] +#no-update = true + +[plugin_options] +#rpm.rpmva = off +kernel.with-timer = on +kernel.trace = yes +networking.traceroute = yes diff -Nru 
sosreport-4.4/tests/report_tests/options_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/options_tests.py --- sosreport-4.4/tests/report_tests/options_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/options_tests.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,45 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. - - -from sos_tests import StageTwoReportTest - - -class OptionsFromConfigTest(StageTwoReportTest): - """Ensure that we handle options specified in sos.conf properly - - :avocado: tags=stagetwo - """ - - files = [('/etc/sos/options_tests_sos.conf', '/etc/sos/sos.conf')] - sos_cmd = '-v ' - - def test_case_id_from_config(self): - self.assertTrue('8675309' in self.archive) - - def test_plugins_skipped_from_config(self): - self.assertPluginNotIncluded(['networking', 'logs']) - - def test_plugopts_logged_from_config(self): - self.assertSosLogContains( - "Set kernel plugin option to \(name=with-timer, desc='gather /proc/timer\* statistics', value=True, default=False\)" - ) - self.assertSosLogContains( - "Set kernel plugin option to \(name=trace, desc='gather /sys/kernel/debug/tracing/trace file', value=True, default=False\)" - ) - - def test_disabled_plugopts_not_loaded(self): - self.assertSosLogNotContains("Set networking plugin option to") - - def test_plugopts_actually_set(self): - self.assertFileCollected('sys/kernel/debug/tracing/trace') - - def test_effective_options_logged_correctly(self): - self.assertSosLogContains( - "effective options now: --batch --case-id 8675309 --plugopts kernel.with-timer=on,kernel.trace=yes --skip-plugins networking,logs" - ) diff -Nru 
sosreport-4.4/tests/report_tests/plugin_tests/collect_manual_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/collect_manual_tests.py --- sosreport-4.4/tests/report_tests/plugin_tests/collect_manual_tests.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/collect_manual_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,37 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + + +from sos_tests import StageOneReportTest + + +class CollectManualTest(StageOneReportTest): + """Test to ensure that collect() is working for plugins that + directly call it as part of their collections + + :avocado: tags=stageone + """ + + sos_cmd = '-o unpackaged,python -k python.hashes' + # unpackaged is only a RedHatPlugin + redhat_only = True + + def test_unpackaged_list_collected(self): + self.assertFileCollected('sos_commands/unpackaged/unpackaged') + + def test_python_hashes_collected(self): + self.assertFileCollected('sos_commands/python/digests.json') + + def test_no_strings_dir(self): + self.assertFileNotCollected('sos_strings/') + + def test_manifest_collections_correct(self): + pkgman = self.get_plugin_manifest('unpackaged') + self.assertTrue(any(c['name'] == 'unpackaged' for c in pkgman['collections'])) + pyman = self.get_plugin_manifest('python') + self.assertTrue(any(c['name'] == 'digests.json' for c in pyman['collections'])) diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/krb5.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/krb5.py --- sosreport-4.4/tests/report_tests/plugin_tests/krb5.py 1970-01-01 05:30:00.000000000 +0530 +++ 
sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/krb5.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,39 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + + +from sos_tests import StageTwoReportTest, redhat_only, ubuntu_only + +class Krb5PluginTest(StageTwoReportTest): + """Ensure that the krb5 plugin activates for the distros that we support it + on. + + See https://github.com/sosreport/sos/issues/3041 + + :avocado: tags=stageone + """ + + sos_cmd = '-o krb5' + packages = { + 'rhel': ['krb5-libs', 'krb5-server'], + 'Ubuntu': ['krb5-user', 'krb5-kdc'] + } + + def test_plugin_ran(self): + self.assertPluginIncluded('krb5') + + def test_conf_collected(self): + self.assertFileCollected('/etc/krb5.conf') + + @ubuntu_only + def test_ubuntu_kdcdir_collected(self): + self.assertFileGlobInArchive('/var/lib/krb5kdc/*') + + @redhat_only + def test_redhat_kdcdir_collected(self): + self.assertFileGlobInArchive('/var/kerberos/krb5kdc/*') diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/logs.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/logs.py --- sosreport-4.4/tests/report_tests/plugin_tests/logs.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/logs.py 2023-05-26 22:32:49.000000000 +0530 @@ -30,7 +30,7 @@ self.assertFileGlobInArchive('/var/log/journal/*') -class LogsSizeLimitTest(StageTwoReportTest): +class JournalSizeLimitTest(StageTwoReportTest): """Test that journal size limiting is working and is independent of --log-size @@ -40,7 +40,7 @@ :avocado: tags=stagetwo """ - sos_cmd = '-o logs' + sos_cmd = '-o logs --journal-size=20 --log-size=10' sos_timeout = 500 packages = { 
'rhel': ['python3-systemd'], @@ -48,36 +48,35 @@ } def pre_sos_setup(self): + # if the journal is already over our size limit, don't write anything + # new to it + from systemd import journal + _reader = journal.Reader() + _size = _reader.get_usage() / 1024 / 1024 + if _size > 20: + return # write 20MB at a time to side-step rate/size limiting on some distros - # write over 100MB to ensure we will actually size limit inside sos, + # write over 20MB to ensure we will actually size limit inside sos, # allowing for any compression or de-dupe systemd does - from systemd import journal sosfd = journal.stream('sos-testing') rsize = 10 * 1048576 - for i in range(6): + for i in range(2): # generate 10MB, write it, then write it in reverse. # Spend less time generating new strings rand = ''.join(random.choice(ascii_uppercase + digits) for _ in range(rsize)) sosfd.write(rand + '\n') # sleep to avoid burst rate-limiting - sleep(10) + sleep(5) sosfd.write(rand[::-1] + '\n') def test_journal_size_limit(self): journ = 'sos_commands/logs/journalctl_--no-pager' self.assertFileCollected(journ) jsize = os.stat(self.get_name_in_archive(journ)).st_size - assert jsize <= 105906176, "Collected journal is larger than 100MB (size: %s)" % jsize - assert jsize > 27262976, "Collected journal limited by --log-size (size: %s)" % jsize + assert jsize <= 20971520, "Collected journal is larger than 20MB (size: %s)" % jsize def test_journal_tailed_and_linked(self): tailed = self.get_name_in_archive('sos_strings/logs/journalctl_--no-pager.tailed') self.assertFileExists(tailed) journ = self.get_name_in_archive('sos_commands/logs/journalctl_--no-pager') assert os.path.islink(journ), "Journal in sos_commands/logs is not a symlink" - - def test_string_not_in_manifest(self): - # we don't want truncated collections appearing in the strings section - # of the manifest for the plugin - manifest = self.get_plugin_manifest('logs') - self.assertFalse(manifest['strings']) diff -Nru 
sosreport-4.4/tests/report_tests/plugin_tests/networking.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/networking.py --- sosreport-4.4/tests/report_tests/plugin_tests/networking.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/networking.py 2023-05-26 22:32:49.000000000 +0530 @@ -33,6 +33,9 @@ def test_netdevs_properly_iterated(self): for dev in os.listdir('/sys/class/net'): - self.assertFileGlobInArchive( - "sos_commands/networking/ethtool_*_%s" % dev - ) + # some file(s) in the dir might not be real netdevs, see e.g. + # https://lwn.net/Articles/142330/ + if not dev.startswith('bonding_'): + self.assertFileGlobInArchive( + "sos_commands/networking/ethtool_*_%s" % dev + ) diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/plugin_environment/default_env_test.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/plugin_environment/default_env_test.py --- sosreport-4.4/tests/report_tests/plugin_tests/plugin_environment/default_env_test.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/plugin_environment/default_env_test.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,28 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos.report.plugins import Plugin, IndependentPlugin + + +class DefaultEnv(Plugin, IndependentPlugin): + + plugin_name = 'default_env_test' + short_desc = 'Fake plugin to test default env var handling' + + def setup(self): + self.set_default_cmd_environment({ + 'TORVALDS': 'Linus', + 'GREATESTSPORT': 'hockey' + }) + + self.add_cmd_output( + "sh -c 'echo Does '$TORVALDS' play '$GREATESTSPORT'?'", + suggest_filename='env_var_test' + ) + + self.add_env_var(['TORVALDS', 'GREATESTSPORT']) diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/plugin_environment/plugin_environment.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/plugin_environment/plugin_environment.py --- sosreport-4.4/tests/report_tests/plugin_tests/plugin_environment/plugin_environment.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/plugin_environment/plugin_environment.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,44 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +import os + +from sos_tests import StageTwoReportTest + + +class PluginDefaultEnvironmentTest(StageTwoReportTest): + """ + Ensure that being able to set a default set of environment variables is + working correctly and does not leave a lingering env var on the system + + :avocado: tags=stagetwo + """ + + install_plugins = ['default_env_test'] + sos_cmd = '-o default_env_test' + + def test_environment_used_in_cmd(self): + self.assertFileHasContent( + 'sos_commands/default_env_test/env_var_test', + 'Does Linus play hockey?' 
+ ) + + def test_environment_setting_logged(self): + self.assertSosLogContains( + 'Default environment for all commands now set to' + ) + + def test_environment_not_set_on_host(self): + self.assertTrue('TORVALDS' not in os.environ) + self.assertTrue('GREATESTSPORT' not in os.environ) + + def test_environment_not_captured(self): + # we should still have an empty environment file + self.assertFileCollected('environment') + self.assertFileNotHasContent('environment', 'TORVALDS') + self.assertFileNotHasContent('environment', 'GREATESTSPORT') diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/plugin_environment.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/plugin_environment.py --- sosreport-4.4/tests/report_tests/plugin_tests/plugin_environment.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/plugin_environment.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,44 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. - -import os - -from sos_tests import StageTwoReportTest - - -class PluginDefaultEnvironmentTest(StageTwoReportTest): - """ - Ensure that being able to set a default set of environment variables is - working correctly and does not leave a lingering env var on the system - - :avocado: tags=stageone - """ - - install_plugins = ['default_env_test'] - sos_cmd = '-o default_env_test' - - def test_environment_used_in_cmd(self): - self.assertFileHasContent( - 'sos_commands/default_env_test/env_var_test', - 'Does Linus play hockey?' 
- ) - - def test_environment_setting_logged(self): - self.assertSosLogContains( - 'Default environment for all commands now set to' - ) - - def test_environment_not_set_on_host(self): - self.assertTrue('TORVALDS' not in os.environ) - self.assertTrue('GREATESTSPORT' not in os.environ) - - def test_environment_not_captured(self): - # we should still have an empty environment file - self.assertFileCollected('environment') - self.assertFileNotHasContent('environment', 'TORVALDS') - self.assertFileNotHasContent('environment', 'GREATESTSPORT') diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/sos_extras/sos_extras.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sos_extras/sos_extras.py --- sosreport-4.4/tests/report_tests/plugin_tests/sos_extras/sos_extras.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sos_extras/sos_extras.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,32 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos_tests import StageTwoReportTest + + +class SosExtrasPluginTest(StageTwoReportTest): + """Ensure that the sos_extras plugin is properly executing command and + file collections as defined in the sos_extras config file + + :avocado: tags=stagetwo + """ + + files = [('sos_testing.conf', '/etc/sos/extras.d/sos_testing.conf')] + # rather than only enabling this plugin, make sure the enablement trigger + # is working + sos_cmd = '-n logs,networking,devicemapper,filesys,systemd' + + def test_extras_enabled(self): + self.assertPluginIncluded('sos_extras') + + def test_setup_message_displayed(self): + self.assertOutputContains('Collecting data from extras file /etc/sos/extras.d/sos_testing.conf') + + def test_extras_config_parsed(self): + self.assertFileCollected('/etc/fstab') + self.assertFileCollected('sos_commands/sos_extras/sos_testing.conf/echo_sos_test') diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/sos_extras/sos_testing.conf sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sos_extras/sos_testing.conf --- sosreport-4.4/tests/report_tests/plugin_tests/sos_extras/sos_testing.conf 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sos_extras/sos_testing.conf 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,4 @@ +# this is a fake sos_extras config file +# +echo sos test +:/etc/fstab diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/sos_extras.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sos_extras.py --- sosreport-4.4/tests/report_tests/plugin_tests/sos_extras.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sos_extras.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,32 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# 
version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. - -from sos_tests import StageTwoReportTest - - -class SosExtrasPluginTest(StageTwoReportTest): - """Ensure that the sos_extras plugin is properly executing command and - file collections as defined in the sos_extras config file - - :avocado: tags=stagetwo - """ - - files = ['/etc/sos/extras.d/sos_testing.conf'] - # rather than only enabling this plugin, make sure the enablement trigger - # is working - sos_cmd = '-n logs,networking,devicemapper,filesys,systemd' - - def test_extras_enabled(self): - self.assertPluginIncluded('sos_extras') - - def test_setup_message_displayed(self): - self.assertOutputContains('Collecting data from extras file /etc/sos/extras.d/sos_testing.conf') - - def test_extras_config_parsed(self): - self.assertFileCollected('/etc/fstab') - self.assertFileCollected('sos_commands/sos_extras/sos_testing.conf/echo_sos_test') diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/string_collection_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/string_collection_tests.py --- sosreport-4.4/tests/report_tests/plugin_tests/string_collection_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/string_collection_tests.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,37 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. 
- - -from sos_tests import StageOneReportTest - - -class CollectStringTest(StageOneReportTest): - """Test to ensure that add_string_as_file() is working for plugins that - directly call it as part of their collections - - :avocado: tags=stageone - """ - - sos_cmd = '-o unpackaged,python -k python.hashes' - # unpackaged is only a RedHatPlugin - redhat_only = True - - def test_unpackaged_list_collected(self): - self.assertFileCollected('sos_commands/unpackaged/unpackaged') - - def test_python_hashes_collected(self): - self.assertFileCollected('sos_commands/python/digests.json') - - def test_no_strings_dir(self): - self.assertFileNotCollected('sos_strings/') - - def test_manifest_strings_correct(self): - pkgman = self.get_plugin_manifest('unpackaged') - self.assertTrue(pkgman['strings']['unpackaged']) - pyman = self.get_plugin_manifest('python') - self.assertTrue(pyman['strings']['digests_json']) diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/sudo/sudo-ldap.conf sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sudo/sudo-ldap.conf --- sosreport-4.4/tests/report_tests/plugin_tests/sudo/sudo-ldap.conf 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sudo/sudo-ldap.conf 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,6 @@ +# This is a fake sudo-ldap.conf +# +# Nothing important is here +uri ldaps://ldap.example.com +binddn cn=sudo,dc=example,dc=com +bindpw sostestpassword diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/sudo/sudo.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sudo/sudo.py --- sosreport-4.4/tests/report_tests/plugin_tests/sudo/sudo.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sudo/sudo.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,36 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# 
modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos_tests import StageOneReportTest, StageTwoReportTest + + +class SudoPluginTest(StageOneReportTest): + """Basic sanity check to make sure ocmmon config files are collected + + :avocado: tags=stageone + """ + + sos_cmd = '-o sudo' + + def test_sudo_conf_collected(self): + self.assertFileCollected('/etc/sudo.conf') + self.assertFileCollected('/etc/sudoers') + + +class SudoLdapScrubbedTest(StageTwoReportTest): + """Ensure that sudo conf is picked up and properly scrubbed + + :avocado: tags=stagetwo + """ + + sos_cmd = '-o sudo' + files = [('sudo-ldap.conf', '/etc/sudo-ldap.conf')] + + def test_bindpw_scrubbed(self): + self.assertFileNotHasContent('/etc/sudo-ldap.conf', 'sostestpassword') + diff -Nru sosreport-4.4/tests/report_tests/plugin_tests/sudo.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sudo.py --- sosreport-4.4/tests/report_tests/plugin_tests/sudo.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/plugin_tests/sudo.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,36 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. 
- -from sos_tests import StageOneReportTest, StageTwoReportTest - - -class SudoPluginTest(StageOneReportTest): - """Basic sanity check to make sure ocmmon config files are collected - - :avocado: tags=stageone - """ - - sos_cmd = '-o sudo' - - def test_sudo_conf_collected(self): - self.assertFileCollected('/etc/sudo.conf') - self.assertFileCollected('/etc/sudoers') - - -class SudoLdapScrubbedTest(StageTwoReportTest): - """Ensure that sudo conf is picked up and properly scrubbed - - :avocado: tags=stagetwo - """ - - sos_cmd = '-o sudo' - files = ['/etc/sudo-ldap.conf'] - - def test_bindpw_scrubbed(self): - self.assertFileNotHasContent('/etc/sudo-ldap.conf', 'sostestpassword') - diff -Nru sosreport-4.4/tests/report_tests/smoke_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/smoke_tests.py --- sosreport-4.4/tests/report_tests/smoke_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/smoke_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -10,7 +10,7 @@ from avocado.utils import process -from sos_tests import StageOneReportTest, SOS_BIN, redhat_only, ubuntu_only +from sos_tests import StageOneReportTest, redhat_only, ubuntu_only # These are the header strings in --list-plugins output @@ -27,7 +27,7 @@ """ def pre_sos_setup(self): - _cmd = '%s report --list-plugins' % SOS_BIN + _cmd = '%s report --list-plugins' % self.sos_bin out = process.run(_cmd, timeout=300).stdout.decode() reg = DISABLED + '(.*?)' + OPTIONS self.plugs = [] diff -Nru sosreport-4.4/tests/report_tests/timeout/timeout_test.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/timeout/timeout_test.py --- sosreport-4.4/tests/report_tests/timeout/timeout_test.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/timeout/timeout_test.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,22 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to 
anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos.report.plugins import Plugin, IndependentPlugin + + +class TimeoutTest(Plugin, IndependentPlugin): + + plugin_name = 'timeout_test' + short_desc = 'Tests timeout functionality in test suite' + plugin_timeout = 100 + + + def setup(self): + self.add_cmd_output('sleep 15') + self.add_cmd_output('echo I slept great', suggest_filename='echo_good') + self.add_cmd_output('sleep 30', timeout=10) diff -Nru sosreport-4.4/tests/report_tests/timeout/timeout_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/timeout/timeout_tests.py --- sosreport-4.4/tests/report_tests/timeout/timeout_tests.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/timeout/timeout_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,69 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos_tests import StageTwoReportTest + + +class PluginTimeoutTest(StageTwoReportTest): + """Test that whole plugin timeout control is working + + :avocado: tags=stagetwo + """ + + install_plugins = ['timeout_test'] + sos_cmd = '-o timeout_test -vvv --plugin-timeout=10' + + def test_correct_plugin_timeout(self): + man = self.get_plugin_manifest('timeout_test') + self.assertEquals(man['timeout'], 10) + + def test_plugin_timed_out(self): + self.assertSosLogNotContains('collected plugin \'timeout_test\' in') + self.assertSosUILogContains('Plugin timeout_test timed out') + + def test_no_output_collected(self): + self.assertFileNotExists('sos_commands/timeout_test/echo_out') + + +class NativeCmdTimeoutTest(StageTwoReportTest): + """Test that the native timeout control for the plugin API is working + + :avocado: tags=stagetwo + """ + + install_plugins = ['timeout_test'] + sos_cmd = '-o timeout_test,host -vvv' + + def test_correct_plugin_timeout(self): + man = self.get_plugin_manifest('timeout_test') + self.assertEquals(man['timeout'], 100) + hman = self.get_plugin_manifest('host') + self.assertEquals(hman['timeout'], 300) + + def test_plugin_completed(self): + self.assertSosLogContains('collected plugin \'timeout_test\' in') + self.assertFileCollected('sos_commands/timeout_test/echo_good') + + def test_command_timed_out(self): + self.assertSosLogContains(r"\[plugin:timeout_test\] command 'sleep 30' timed out after 10s") + self.assertFileCollected('sos_commands/timeout_test/sleep_30') + +class MultipleTimeoutValues(NativeCmdTimeoutTest): + """Test that our plugin timeout option priority is functioning correctly + + :avocado: tags=stagetwo + """ + + install_plugins = ['timeout_test'] + sos_cmd = '-o timeout_test,host --plugin-timeout=30 -k timeout_test.timeout=60' + + def test_correct_plugin_timeout(self): + man = self.get_plugin_manifest('timeout_test') + self.assertEquals(man['timeout'], 60) + hman = self.get_plugin_manifest('host') + 
self.assertEquals(hman['timeout'], 30) diff -Nru sosreport-4.4/tests/report_tests/timeout_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/timeout_tests.py --- sosreport-4.4/tests/report_tests/timeout_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/report_tests/timeout_tests.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,69 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. - -from sos_tests import StageTwoReportTest - - -class PluginTimeoutTest(StageTwoReportTest): - """Test that whole plugin timeout control is working - - :avocado: tags=stagetwo - """ - - install_plugins = ['timeout_test'] - sos_cmd = '-o timeout_test -vvv --plugin-timeout=10' - - def test_correct_plugin_timeout(self): - man = self.get_plugin_manifest('timeout_test') - self.assertEquals(man['timeout'], 10) - - def test_plugin_timed_out(self): - self.assertSosLogNotContains('collected plugin \'timeout_test\' in') - self.assertSosUILogContains('Plugin timeout_test timed out') - - def test_no_output_collected(self): - self.assertFileNotExists('sos_commands/timeout_test/echo_out') - - -class NativeCmdTimeoutTest(StageTwoReportTest): - """Test that the native timeout control for the plugin API is working - - :avocado: tags=stagetwo - """ - - install_plugins = ['timeout_test'] - sos_cmd = '-o timeout_test,host -vvv' - - def test_correct_plugin_timeout(self): - man = self.get_plugin_manifest('timeout_test') - self.assertEquals(man['timeout'], 100) - hman = self.get_plugin_manifest('host') - self.assertEquals(hman['timeout'], 300) - - def test_plugin_completed(self): - self.assertSosLogContains('collected plugin \'timeout_test\' in') - 
self.assertFileCollected('sos_commands/timeout_test/echo_good') - - def test_command_timed_out(self): - self.assertSosLogContains(r"\[plugin:timeout_test\] command 'sleep 30' timed out after 10s") - self.assertFileCollected('sos_commands/timeout_test/sleep_30') - -class MultipleTimeoutValues(NativeCmdTimeoutTest): - """Test that our plugin timeout option priority is functioning correctly - - :avocado: tags=stagetwo - """ - - install_plugins = ['timeout_test'] - sos_cmd = '-o timeout_test,host --plugin-timeout=30 -k timeout_test.timeout=60' - - def test_correct_plugin_timeout(self): - man = self.get_plugin_manifest('timeout_test') - self.assertEquals(man['timeout'], 60) - hman = self.get_plugin_manifest('host') - self.assertEquals(hman['timeout'], 30) diff -Nru sosreport-4.4/tests/sos_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/sos_tests.py --- sosreport-4.4/tests/sos_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/sos_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -11,9 +11,11 @@ from avocado.core.output import LOG_UI from avocado import Test from avocado.utils import archive, process, distro, software_manager +from avocado.utils.cpu import get_arch from fnmatch import fnmatch import glob +import inspect import json import os import pickle @@ -25,7 +27,7 @@ SOS_REPO_ROOT = os.path.realpath(os.path.join(SOS_TEST_DIR, '../')) SOS_PLUGIN_DIR = os.path.realpath(os.path.join(SOS_REPO_ROOT, 'sos/report/plugins')) SOS_TEST_DATA_DIR = os.path.realpath(os.path.join(SOS_TEST_DIR, 'test_data')) -SOS_BIN = os.path.realpath(os.path.join(SOS_TEST_DIR, '../bin/sos')) +SOS_TEST_BIN = os.path.realpath(os.path.join(SOS_TEST_DIR, '../bin/sos')) RH_DIST = ['rhel', 'centos', 'fedora'] UBUNTU_DIST = ['Ubuntu', 'debian'] @@ -64,11 +66,13 @@ _klass_name = None _tmpdir = None _exception_expected = False + _local_sos_bin = shutil.which('sos') or SOS_TEST_BIN sos_cmd = '' sos_timeout = 600 redhat_only = False ubuntu_only = False end_of_test_case = 
False + arch = [] @property def klass_name(self): @@ -82,6 +86,10 @@ self._tmpdir = os.getenv('AVOCADO_TESTS_COMMON_TMPDIR') + self.klass_name return self._tmpdir + @property + def sos_bin(self): + return self._local_sos_bin if self.params.get('TESTLOCAL') == 'true' else SOS_TEST_BIN + def generate_sysinfo(self): """Collects some basic information about the system for later reference in individual tests @@ -221,6 +229,22 @@ if self.local_distro not in UBUNTU_DIST: raise TestSkipError("Not running on a Ubuntu or Debian distro") + def check_arch_for_enablement(self): + """ + Check if the test case is meant only for a specific architecture, and + if it is, that we're also currently running on (one of) those arches. + + This relies on the `arch` class attr, which should be a list. If the + list is empty, assume all arches are acceptable. Otherwise, raise a + TestSkipError. + """ + sys_arch = get_arch() + if not self.arch or sys_arch in self.arch: + return True + raise TestSkipError(f"Unsupported architecture {sys_arch} for test " + f"(supports: {self.arch})") + + def setUp(self): """Setup the tmpdir and any needed mocking for the test, then execute the defined sos command. 
Ensure that we only run the sos command once @@ -228,6 +252,7 @@ """ self.local_distro = distro.detect().name self.check_distro_for_enablement() + self.check_arch_for_enablement() # check to prevent multiple setUp() runs if not os.path.isdir(self.tmpdir): # setup our class-shared tmpdir @@ -344,7 +369,7 @@ self._manifest = json.loads(content) except Exception: self._manifest = '' - self.warn('Could not load manifest for test') + self.warning('Could not load manifest for test') return self._manifest @property @@ -417,7 +442,7 @@ return os.path.join(self.tmpdir, "sosreport-%s" % self.__class__.__name__) def _generate_sos_command(self): - return "%s %s -v --batch --tmp-dir %s %s" % (SOS_BIN, self.sos_component, self.tmpdir, self.sos_cmd) + return "%s %s -v --batch --tmp-dir %s %s" % (self.sos_bin, self.sos_component, self.tmpdir, self.sos_cmd) def _execute_sos_cmd(self): super(BaseSoSReportTest, self)._execute_sos_cmd() @@ -786,7 +811,7 @@ for plug in self.install_plugins: if not plug.endswith('.py'): plug += '.py' - fake_plug = os.path.join(SOS_TEST_DATA_DIR, 'fake_plugins', plug) + fake_plug = os.path.join(os.path.dirname(inspect.getfile(self.__class__)), plug) if os.path.exists(fake_plug): shutil.copy(fake_plug, SOS_PLUGIN_DIR) _installed.append(os.path.realpath(os.path.join(SOS_PLUGIN_DIR, plug))) @@ -841,23 +866,21 @@ for pkg in pkgs: self.sm.remove(pkg) - def _copy_test_file(self, src, dest=None): + def _copy_test_file(self, filetup): """Helper to copy files from tests/test_data to relevant locations on the test system. 
If ``dest`` is provided, use that as the destination filename instead of using the ``src`` name """ - - if dest is None: - dest = src + src, dest = filetup dir_added = False if os.path.exists(dest): os.rename(dest, dest + '.sostesting') - _dir = os.path.split(src)[0] + _dir = os.path.dirname(dest) if not os.path.exists(_dir): os.makedirs(_dir) self._created_files.append(_dir) dir_added = True - _test_file = os.path.join(SOS_TEST_DIR, 'test_data', src.lstrip('/')) + _test_file = os.path.join(os.path.dirname(inspect.getfile(self.__class__)), src.lstrip('/')) shutil.copy(_test_file, dest) if not dir_added: self._created_files.append(dest) @@ -871,10 +894,9 @@ test(s) have run. """ for mfile in self.files: - if isinstance(mfile, tuple): - self._copy_test_file(mfile[0], mfile[1]) - else: - self._copy_test_file(mfile) + if not isinstance(mfile, tuple): + raise Exception(f"Mocked files must be provided via tuples, not {mfile.__class__}") + self._copy_test_file(mfile) if self._created_files: self._write_file_to_tmpdir('mocked_files', json.dumps(self._created_files)) @@ -949,7 +971,7 @@ sos_cmd = '' def _generate_sos_command(self): - return "%s %s" % (SOS_BIN, self.sos_cmd) + return "%s %s" % (self.sos_bin, self.sos_cmd) @skipIf(lambda x: x._exception_expected, "Non-zero exit code expected") def test_help_output_successful(self): diff -Nru sosreport-4.4/tests/test_data/etc/sos/extras.d/sos_clean_config.conf sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/etc/sos/extras.d/sos_clean_config.conf --- sosreport-4.4/tests/test_data/etc/sos/extras.d/sos_clean_config.conf 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/etc/sos/extras.d/sos_clean_config.conf 1970-01-01 05:30:00.000000000 +0530 @@ -1,3 +0,0 @@ -# sos_extras config file to assist with the clean_config test. 
- -:/var/log/clean_config_test.txt diff -Nru sosreport-4.4/tests/test_data/etc/sos/extras.d/sos_testing.conf sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/etc/sos/extras.d/sos_testing.conf --- sosreport-4.4/tests/test_data/etc/sos/extras.d/sos_testing.conf 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/etc/sos/extras.d/sos_testing.conf 1970-01-01 05:30:00.000000000 +0530 @@ -1,4 +0,0 @@ -# this is a fake sos_extras config file -# -echo sos test -:/etc/fstab diff -Nru sosreport-4.4/tests/test_data/etc/sos/options_tests_sos.conf sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/etc/sos/options_tests_sos.conf --- sosreport-4.4/tests/test_data/etc/sos/options_tests_sos.conf 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/etc/sos/options_tests_sos.conf 1970-01-01 05:30:00.000000000 +0530 @@ -1,18 +0,0 @@ -[global] -#verbose = 3 - -[report] -skip-plugins = networking,logs -case-id = 8675309 - -[collect] -#primary = myhost.example.com - -[clean] -#no-update = true - -[plugin_options] -#rpm.rpmva = off -kernel.with-timer = on -kernel.trace = yes -networking.traceroute = yes diff -Nru sosreport-4.4/tests/test_data/etc/sos/sos.conf sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/etc/sos/sos.conf --- sosreport-4.4/tests/test_data/etc/sos/sos.conf 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/etc/sos/sos.conf 1970-01-01 05:30:00.000000000 +0530 @@ -1,16 +0,0 @@ -[global] -#verbose = 3 - -[report] -#skip-plugins = rpm,selinux,dovecot - -[collect] -#primary = myhost.example.com - -[clean] -keywords = shibboleth -domains = sosexample.com -#no-update = true - -[plugin_options] -#rpm.rpmva = off diff -Nru sosreport-4.4/tests/test_data/etc/sudo-ldap.conf sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/etc/sudo-ldap.conf --- sosreport-4.4/tests/test_data/etc/sudo-ldap.conf 2022-08-16 01:37:50.000000000 +0530 +++ 
sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/etc/sudo-ldap.conf 1970-01-01 05:30:00.000000000 +0530 @@ -1,6 +0,0 @@ -# This is a fake sudo-ldap.conf -# -# Nothing important is here -uri ldaps://ldap.example.com -binddn cn=sudo,dc=example,dc=com -bindpw sostestpassword diff -Nru sosreport-4.4/tests/test_data/fake_plugins/binary_test.py sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/fake_plugins/binary_test.py --- sosreport-4.4/tests/test_data/fake_plugins/binary_test.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/fake_plugins/binary_test.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,21 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. - -from sos.report.plugins import Plugin, IndependentPlugin - - -class BinaryPlugin(Plugin, IndependentPlugin): - """Test plugin for testing binary removal with --clean - """ - - plugin_name = 'binary_test' - short_desc = 'test plugin for removing binaries with --clean' - - - def setup(self): - self.add_copy_spec('/var/log/binary_test.tar.xz') diff -Nru sosreport-4.4/tests/test_data/fake_plugins/default_env_test.py sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/fake_plugins/default_env_test.py --- sosreport-4.4/tests/test_data/fake_plugins/default_env_test.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/fake_plugins/default_env_test.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,28 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public 
License. -# -# See the LICENSE file in the source distribution for further information. - -from sos.report.plugins import Plugin, IndependentPlugin - - -class DefaultEnv(Plugin, IndependentPlugin): - - plugin_name = 'default_env_test' - short_desc = 'Fake plugin to test default env var handling' - - def setup(self): - self.set_default_cmd_environment({ - 'TORVALDS': 'Linus', - 'GREATESTSPORT': 'hockey' - }) - - self.add_cmd_output( - "sh -c 'echo Does '$TORVALDS' play '$GREATESTSPORT'?'", - suggest_filename='env_var_test' - ) - - self.add_env_var(['TORVALDS', 'GREATESTSPORT']) diff -Nru sosreport-4.4/tests/test_data/fake_plugins/skip_versions.py sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/fake_plugins/skip_versions.py --- sosreport-4.4/tests/test_data/fake_plugins/skip_versions.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/fake_plugins/skip_versions.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,24 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. 
- -from sos.report.plugins import Plugin, IndependentPlugin - - -class SkipVersions(Plugin, IndependentPlugin): - """Collect the fake version files from the test suite, to ensure proper - skipping of version files - """ - - plugin_name = 'skip_versions' - short_desc = 'fake plugin to test skipping version files via the IP parser' - - def setup(self): - self.add_copy_spec([ - '/tmp/sos-test-version.txt', - '/tmp/sos-test-version-noskip' - ]) diff -Nru sosreport-4.4/tests/test_data/fake_plugins/timeout_test.py sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/fake_plugins/timeout_test.py --- sosreport-4.4/tests/test_data/fake_plugins/timeout_test.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/fake_plugins/timeout_test.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,22 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. 
- -from sos.report.plugins import Plugin, IndependentPlugin - - -class TimeoutTest(Plugin, IndependentPlugin): - - plugin_name = 'timeout_test' - short_desc = 'Tests timeout functionality in test suite' - plugin_timeout = 100 - - - def setup(self): - self.add_cmd_output('sleep 15') - self.add_cmd_output('echo I slept great', suggest_filename='echo_good') - self.add_cmd_output('sleep 30', timeout=10) diff -Nru sosreport-4.4/tests/test_data/foreman_setup.sh sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/foreman_setup.sh --- sosreport-4.4/tests/test_data/foreman_setup.sh 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/foreman_setup.sh 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,37 @@ +#!/bin/bash + +SUCCESS=0 + +if grep -iq centos /etc/os-release; then + if [[ `echo | awk "{print ($FOREMAN_VER >= 3.4)}"` -eq 1 ]]; then + dnf -y install https://yum.puppet.com/puppet7-release-el-8.noarch.rpm + dnf -y install https://yum.theforeman.org/releases/$FOREMAN_VER/el8/x86_64/foreman-release.rpm + dnf -y module enable foreman:el8 + else + dnf -y install https://yum.puppet.com/puppet6-release-el-8.noarch.rpm + dnf -y install https://yum.theforeman.org/releases/$FOREMAN_VER/el8/x86_64/foreman-release.rpm + dnf -y module enable ruby:2.7 + fi + dnf -y install foreman-installer && SUCCESS=1 +elif grep -iq debian /etc/os-release; then + apt-get update + apt-get -y install ca-certificates locales + sed -i 's/^# *\(en_US.UTF-8\)/\1/' /etc/locale.gen + locale-gen + export LC_ALL=en_US.UTF-8 + export LANG=en_US.UTF-8 + curl https://apt.puppet.com/puppet7-release-bullseye.deb --output /root/puppet7-release-bullseye.deb + apt-get install /root/puppet7-release-bullseye.deb + curl https://deb.theforeman.org/foreman.asc --output /etc/apt/trusted.gpg.d/foreman.asc + echo "deb http://deb.theforeman.org/ bullseye $FOREMAN_VER" | tee /etc/apt/sources.list.d/foreman.list + echo "deb http://deb.theforeman.org/ plugins $FOREMAN_VER" | tee -a 
/etc/apt/sources.list.d/foreman.list + apt-get update + apt-get -y install foreman-installer && SUCCESS=1 +fi + +if [[ $SUCCESS == 1 ]]; then + foreman-installer --foreman-db-password='S0Sdb=p@ssw0rd!' --foreman-initial-admin-password='S0S@dmin\\p@ssw0rd!' +else + echo "Setup failed" + exit 1 +fi diff -Nru sosreport-4.4/tests/test_data/tmp/sos-test-version-noskip sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/tmp/sos-test-version-noskip --- sosreport-4.4/tests/test_data/tmp/sos-test-version-noskip 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/tmp/sos-test-version-noskip 1970-01-01 05:30:00.000000000 +0530 @@ -1,6 +0,0 @@ -This is a test file for skipping version files with the IP parser. - -All dotted-quad strings SHOULD be changed in this file. - -10.11.12.13 -6.0.0.1 diff -Nru sosreport-4.4/tests/test_data/tmp/sos-test-version.txt sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/tmp/sos-test-version.txt --- sosreport-4.4/tests/test_data/tmp/sos-test-version.txt 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/tmp/sos-test-version.txt 1970-01-01 05:30:00.000000000 +0530 @@ -1,6 +0,0 @@ -This is a test file for skipping version files with the IP parser. - -No dotted-quad strings should be changed in this file. - -10.11.12.13 -6.0.0.1 Binary files /tmp/5urYQka9UX/sosreport-4.4/tests/test_data/var/log/binary_test.tar.xz and /tmp/NJomlL8we3/sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/var/log/binary_test.tar.xz differ diff -Nru sosreport-4.4/tests/test_data/var/log/clean_config_test.txt sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/var/log/clean_config_test.txt --- sosreport-4.4/tests/test_data/var/log/clean_config_test.txt 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/test_data/var/log/clean_config_test.txt 1970-01-01 05:30:00.000000000 +0530 @@ -1,10 +0,0 @@ -This is a test file for use with testing sos. 
- -The clean_config test should use this file for testing that the config section for -[clean] is loaded when `--clean` is specified on the command line. - -This line contains 'shibboleth' which should be scrubbed. - -The domain sosexample.com should also be scrubbed. Also subdomains like foobar.sosexample.com should be removed. - -The domain example.com should not be removed. diff -Nru sosreport-4.4/tests/unittests/cleaner_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/cleaner_tests.py --- sosreport-4.4/tests/unittests/cleaner_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/cleaner_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -13,10 +13,13 @@ from sos.cleaner.parsers.mac_parser import SoSMacParser from sos.cleaner.parsers.hostname_parser import SoSHostnameParser from sos.cleaner.parsers.keyword_parser import SoSKeywordParser +from sos.cleaner.parsers.ipv6_parser import SoSIPv6Parser +from sos.cleaner.parsers.username_parser import SoSUsernameParser from sos.cleaner.mappings.ip_map import SoSIPMap from sos.cleaner.mappings.mac_map import SoSMacMap from sos.cleaner.mappings.hostname_map import SoSHostnameMap from sos.cleaner.mappings.keyword_map import SoSKeywordMap +from sos.cleaner.mappings.ipv6_map import SoSIPv6Map class CleanerMapTests(unittest.TestCase): @@ -27,6 +30,7 @@ self.host_map = SoSHostnameMap() self.host_map.load_domains_from_options(['redhat.com']) self.kw_map = SoSKeywordMap() + self.ipv6_map = SoSIPv6Map() def test_mac_map_obfuscate_valid_v4(self): _test = self.mac_map.get('12:34:56:78:90:ab') @@ -96,16 +100,65 @@ _test = self.kw_map.get('foobar') self.assertEqual(_test, 'obfuscatedword0') + def test_ipv6_obfuscate_global(self): + _net = '2022:1104:abcd::' + _ob_net = self.ipv6_map.get(_net) + self.assertNotEqual(_net, _ob_net, 'Address was unchanged') + self.assertTrue(_ob_net.startswith('534f'), 'Global address does not start with identifier') + _host = '2022:1104:abcd::1234' + _ob_host 
= self.ipv6_map.get(_host) + self.assertNotEqual(_host, _ob_host, 'Host address was unchanged') + self.assertTrue(_host.startswith(_net), 'Host address not in network') + + def test_ipv6_link_local(self): + _test = 'fe80::1234' + _ob_test = self.ipv6_map.get(_test) + self.assertTrue(_ob_test.startswith('fe80'), 'Link-local identifier not maintained') + self.assertNotEqual(_test, _ob_test, 'Device address was unchanged') + + def test_ipv6_private(self): + _net = 'fd00:abcd::' + _host = 'fd00:abcd::1234' + _ob_net = self.ipv6_map.get(_net).split('/')[0] + _ob_host = self.ipv6_map.get(_host) + self.assertTrue(_ob_net.startswith('fd53'), 'Private network does not start with identifier') + self.assertTrue(_ob_host.startswith(_ob_net), 'Private address not in same network') + self.assertNotEqual(_net, _ob_net, 'Private network was unchanged') + + def test_ipv6_short_network(self): + _net = 'ff02::' + _ob_net = self.ipv6_map.get(_net) + self.assertTrue(_ob_net.startswith(('53', '54')), f'Short network does not start with identifier: {_ob_net}') + + def test_ipv6_consistent_obfuscation(self): + _test = '2022:1104:abcd::ef09' + _new = self.ipv6_map.get(_test) + _second = self.ipv6_map.get(_test) + self.assertEqual(_new, _second, "Same address produced two different results") + + def test_ipv6_global_no_collision(self): + """Tests that generating more than 256 global network obfuscations does + not produce any repeats""" + _nets = [] + for i in range(1, 300): + _nets.append(self.ipv6_map.get(f"f{i:03}::abcd").split('::')[0]) + # if there are any duplicates, then the length of the set will not match + self.assertTrue(len(set(_nets)) == len(_nets), "Duplicate global network obfuscations produced") + self.assertTrue(_nets[-1].startswith('54'), "First hextet of global network obfuscation over 256 not expected '54'") class CleanerParserTests(unittest.TestCase): def setUp(self): self.ip_parser = SoSIPParser(config={}) + self.ipv6_parser = SoSIPv6Parser(config={}) self.mac_parser = 
SoSMacParser(config={}) - self.host_parser = SoSHostnameParser(config={}, opt_domains='foobar.com') + self.host_parser = SoSHostnameParser(config={}, + opt_domains=['foobar.com']) self.kw_parser = SoSKeywordParser(config={}, keywords=['foobar']) self.kw_parser_none = SoSKeywordParser(config={}) self.kw_parser.generate_item_regexes() + self.uname_parser = SoSUsernameParser(config={}, + opt_names=['DOMAIN\myusername']) def test_ip_parser_valid_ipv4_line(self): line = 'foobar foo 10.0.0.1/24 barfoo bar' @@ -171,6 +224,18 @@ _test = self.host_parser.parse_line(line)[0] self.assertNotEqual(line, _test) + def test_obfuscate_whole_fqdn_for_given_domainname(self): + self.host_parser.load_hostname_into_map('sostestdomain.domain') + line = 'let obfuscate soshost.sostestdomain.domain' + _test = self.host_parser.parse_line(line)[0] + self.assertFalse('soshost' in _test) + self.assertFalse('sostestdomain' in _test) + + def test_hostname_no_obfuscate_underscore(self): + line = 'pam_env.so _why.not_' + _test = self.host_parser.parse_line(line)[0] + self.assertEqual(line, _test) + def test_keyword_parser_valid_line(self): line = 'this is my foobar test line' _test = self.kw_parser.parse_line(line)[0] @@ -180,3 +245,32 @@ line = 'this is my foobar test line' _test = self.kw_parser_none.parse_line(line)[0] self.assertEqual(line, _test) + + def test_ipv6_parser_strings(self): + t1 = 'testing abcd:ef01::1234 as a compressed address' + t2 = 'testing abcd:ef01::5678:1234 as a separate address' + t3 = 'testing 2607:c540:8c00:3318::34/64 as another address' + t4 = 'testing 2007:1234:5678:90ab:0987:6543:21fe:dcba as a full address' + t1_test = self.ipv6_parser.parse_line(t1)[0] + t2_test = self.ipv6_parser.parse_line(t2)[0] + t3_test = self.ipv6_parser.parse_line(t3)[0] + t4_test = self.ipv6_parser.parse_line(t4)[0] + self.assertNotEqual(t1, t1_test, f"Parser did not match and obfuscate '{t1}'") + self.assertNotEqual(t2, t2_test, f"Parser did not match and obfuscate '{t2}'") + 
self.assertNotEqual(t3, t3_test, f"Parser did not match and obfuscate '{t3}'") + self.assertNotEqual(t4, t4_test, f"Parser did not match and obfuscate '{t4}'") + + def test_ipv6_no_match_signature(self): + modstr = '2D:4F:6E:55:4F:E8:5E:D2:D2:A3:73:62:AB:FD:F9:C5:A5:53:31:93' + mod_test = self.ipv6_parser.parse_line(modstr)[0] + self.assertEqual(modstr, mod_test, "Parser matched module signature, and should not") + + def test_ipv6_no_match_log_false_positive(self): + logln = 'Automatically imported trusted_ca::ca from trusted_ca/ca into production' + log_test = self.ipv6_parser.parse_line(logln)[0] + self.assertEqual(logln, log_test, "IPv6 parser incorrectly matched a log line of 'trusted_ca::ca'") + + def test_ad_username(self): + line = "DOMAIN\myusername" + _test = self.uname_parser.parse_line(line)[0] + self.assertNotEqual(line, _test) diff -Nru sosreport-4.4/tests/unittests/juju/data/juju_output_sos2.json sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/juju/data/juju_output_sos2.json --- sosreport-4.4/tests/unittests/juju/data/juju_output_sos2.json 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/juju/data/juju_output_sos2.json 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1 @@ +{"model":{"name":"sos2","type":"iaas","controller":"local-lxc","cloud":"localhost","region":"localhost","version":"2.9.42","model-status":{"current":"available","since":"07 Apr 2023 12:54:21+08:00"},"sla":"unsupported"},"machines":{"0":{"juju-status":{"current":"started","since":"07 Apr 2023 12:57:16+08:00","version":"2.9.42"},"hostname":"juju-1cba19-0","dns-name":"10.224.139.132","ip-addresses":["10.224.139.132"],"instance-id":"juju-1cba19-0","machine-status":{"current":"running","message":"Running","since":"07 Apr 2023 12:55:16+08:00"},"modification-status":{"current":"applied","since":"07 Apr 2023 
12:55:13+08:00"},"series":"focal","network-interfaces":{"eth0":{"ip-addresses":["10.224.139.132"],"mac-address":"00:16:3e:55:4b:a9","gateway":"10.224.139.1","space":"alpha","is-up":true}},"constraints":"arch=amd64","hardware":"arch=amd64 cores=0 mem=0M"},"1":{"juju-status":{"current":"started","since":"07 Apr 2023 12:57:17+08:00","version":"2.9.42"},"hostname":"juju-1cba19-1","dns-name":"10.224.139.94","ip-addresses":["10.224.139.94"],"instance-id":"juju-1cba19-1","machine-status":{"current":"running","message":"Running","since":"07 Apr 2023 12:55:16+08:00"},"modification-status":{"current":"applied","since":"07 Apr 2023 12:55:15+08:00"},"series":"focal","network-interfaces":{"eth0":{"ip-addresses":["10.224.139.94"],"mac-address":"00:16:3e:5d:d8:08","gateway":"10.224.139.1","space":"alpha","is-up":true}},"constraints":"arch=amd64","hardware":"arch=amd64 cores=0 mem=0M"}},"applications":{"ubuntu":{"charm":"ubuntu","series":"focal","os":"ubuntu","charm-origin":"charmhub","charm-name":"ubuntu","charm-rev":22,"charm-channel":"stable","exposed":false,"application-status":{"current":"active","since":"07 Apr 2023 12:57:19+08:00"},"units":{"ubuntu/0":{"workload-status":{"current":"active","since":"07 Apr 2023 12:57:19+08:00"},"juju-status":{"current":"idle","since":"07 Apr 2023 12:57:21+08:00","version":"2.9.42"},"leader":true,"machine":"0","public-address":"10.224.139.132"},"ubuntu/1":{"workload-status":{"current":"active","since":"07 Apr 2023 12:57:20+08:00"},"juju-status":{"current":"idle","since":"07 Apr 2023 12:57:22+08:00","version":"2.9.42"},"machine":"1","public-address":"10.224.139.94"}},"version":"20.04"}},"storage":{"storage":{"files/0":{"kind":"filesystem","life":"alive","status":{"current":"attached","since":"07 Apr 2023 12:57:18+08:00"},"persistent":false,"attachments":{"units":{"ubuntu/0":{"machine":"0","location":"/srv/data","life":"alive"}}}},"files/1":{"kind":"filesystem","life":"alive","status":{"current":"attached","since":"07 Apr 2023 
12:57:19+08:00"},"persistent":false,"attachments":{"units":{"ubuntu/1":{"machine":"1","location":"/srv/data","life":"alive"}}}}},"filesystems":{"0/0":{"provider-id":"0/0","storage":"files/0","Attachments":{"machines":{"0":{"mount-point":"/srv/data","read-only":false,"life":"alive"}},"units":{"ubuntu/0":{"machine":"0","location":"/srv/data","life":"alive"}}},"pool":"rootfs","size":11131,"life":"alive","status":{"current":"attached","since":"07 Apr 2023 12:57:18+08:00"}},"1/1":{"provider-id":"1/1","storage":"files/1","Attachments":{"machines":{"1":{"mount-point":"/srv/data","read-only":false,"life":"alive"}},"units":{"ubuntu/1":{"machine":"1","location":"/srv/data","life":"alive"}}},"pool":"rootfs","size":11094,"life":"alive","status":{"current":"attached","since":"07 Apr 2023 12:57:19+08:00"}}}},"controller":{"timestamp":"14:16:44+08:00"}} diff -Nru sosreport-4.4/tests/unittests/juju/data/juju_output_sos.json sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/juju/data/juju_output_sos.json --- sosreport-4.4/tests/unittests/juju/data/juju_output_sos.json 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/juju/data/juju_output_sos.json 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1 @@ +{"model":{"name":"sos","type":"iaas","controller":"local-lxc","cloud":"localhost","region":"localhost","version":"2.9.42","model-status":{"current":"available","since":"06 Apr 2023 11:31:27+08:00"},"sla":"unsupported"},"machines":{"0":{"juju-status":{"current":"started","since":"06 Apr 2023 11:33:59+08:00","version":"2.9.42"},"hostname":"juju-38ab8b-0","dns-name":"10.224.139.234","ip-addresses":["10.224.139.234"],"instance-id":"juju-38ab8b-0","machine-status":{"current":"running","message":"Running","since":"06 Apr 2023 11:31:58+08:00"},"modification-status":{"current":"applied","since":"06 Apr 2023 
11:31:56+08:00"},"series":"focal","network-interfaces":{"eth0":{"ip-addresses":["10.224.139.234"],"mac-address":"00:16:3e:52:05:7f","gateway":"10.224.139.1","space":"alpha","is-up":true}},"constraints":"arch=amd64","hardware":"arch=amd64 cores=0 mem=0M"},"2":{"juju-status":{"current":"started","since":"07 Apr 2023 14:41:09+08:00","version":"2.9.42"},"hostname":"juju-38ab8b-2","dns-name":"10.224.139.188","ip-addresses":["10.224.139.188"],"instance-id":"juju-38ab8b-2","machine-status":{"current":"running","message":"Running","since":"07 Apr 2023 14:39:09+08:00"},"modification-status":{"current":"applied","since":"07 Apr 2023 14:39:06+08:00"},"series":"focal","network-interfaces":{"eth0":{"ip-addresses":["10.224.139.188"],"mac-address":"00:16:3e:44:31:2d","gateway":"10.224.139.1","space":"alpha","is-up":true}},"constraints":"arch=amd64","hardware":"arch=amd64 cores=0 mem=0M"},"3":{"juju-status":{"current":"started","since":"07 Apr 2023 14:41:08+08:00","version":"2.9.42"},"hostname":"juju-38ab8b-3","dns-name":"10.224.139.181","ip-addresses":["10.224.139.181"],"instance-id":"juju-38ab8b-3","machine-status":{"current":"running","message":"Running","since":"07 Apr 2023 14:39:09+08:00"},"modification-status":{"current":"applied","since":"07 Apr 2023 14:39:06+08:00"},"series":"focal","network-interfaces":{"eth0":{"ip-addresses":["10.224.139.181"],"mac-address":"00:16:3e:44:80:a9","gateway":"10.224.139.1","space":"alpha","is-up":true}},"constraints":"arch=amd64","hardware":"arch=amd64 cores=0 mem=0M"},"4":{"juju-status":{"current":"started","since":"07 Apr 2023 17:01:12+08:00","version":"2.9.42"},"hostname":"juju-38ab8b-4","dns-name":"10.224.139.114","ip-addresses":["10.224.139.114"],"instance-id":"juju-38ab8b-4","machine-status":{"current":"running","message":"Running","since":"07 Apr 2023 16:59:27+08:00"},"modification-status":{"current":"applied","since":"07 Apr 2023 
16:59:26+08:00"},"series":"jammy","network-interfaces":{"eth0":{"ip-addresses":["10.224.139.114"],"mac-address":"00:16:3e:0f:84:45","gateway":"10.224.139.1","space":"alpha","is-up":true}},"constraints":"arch=amd64","hardware":"arch=amd64 cores=0 mem=0M"}},"applications":{"nginx":{"charm":"nginx","series":"jammy","os":"ubuntu","charm-origin":"charmhub","charm-name":"nginx","charm-rev":6,"charm-channel":"stable","exposed":false,"application-status":{"current":"unknown","since":"07 Apr 2023 16:59:17+08:00"},"units":{"nginx/1":{"workload-status":{"current":"unknown","since":"07 Apr 2023 17:01:28+08:00"},"juju-status":{"current":"idle","since":"07 Apr 2023 17:01:28+08:00","version":"2.9.42"},"leader":true,"machine":"4","public-address":"10.224.139.114"}},"endpoint-bindings":{"":"alpha","publish":"alpha"}},"nrpe":{"charm":"nrpe","series":"jammy","os":"ubuntu","charm-origin":"charmhub","charm-name":"nrpe","charm-rev":97,"charm-channel":"stable","exposed":false,"application-status":{"current":"active","message":"Ready","since":"06 Apr 2023 11:38:44+08:00"},"relations":{"general-info":["ubuntu"]},"subordinate-to":["ubuntu"],"endpoint-bindings":{"":"alpha","general-info":"alpha","local-monitors":"alpha","monitors":"alpha","nrpe":"alpha","nrpe-external-master":"alpha"}},"ubuntu":{"charm":"ubuntu","series":"focal","os":"ubuntu","charm-origin":"charmhub","charm-name":"ubuntu","charm-rev":22,"charm-channel":"stable","exposed":false,"application-status":{"current":"active","since":"06 Apr 2023 11:34:02+08:00"},"relations":{"juju-info":["nrpe"]},"units":{"ubuntu/0":{"workload-status":{"current":"active","since":"06 Apr 2023 11:34:02+08:00"},"juju-status":{"current":"idle","since":"06 Apr 2023 11:34:04+08:00","version":"2.9.42"},"leader":true,"machine":"0","public-address":"10.224.139.234","subordinates":{"nrpe/0":{"workload-status":{"current":"active","message":"Ready","since":"06 Apr 2023 11:38:44+08:00"},"juju-status":{"current":"idle","since":"06 Apr 2023 
11:34:30+08:00","version":"2.9.42"},"leader":true,"open-ports":["icmp","5666/tcp"],"public-address":"10.224.139.234"}}},"ubuntu/1":{"workload-status":{"current":"active","since":"07 Apr 2023 14:41:13+08:00"},"juju-status":{"current":"idle","since":"07 Apr 2023 14:41:15+08:00","version":"2.9.42"},"machine":"2","public-address":"10.224.139.188","subordinates":{"nrpe/2":{"workload-status":{"current":"active","message":"Ready","since":"07 Apr 2023 14:45:38+08:00"},"juju-status":{"current":"idle","since":"07 Apr 2023 14:41:41+08:00","version":"2.9.42"},"open-ports":["icmp","5666/tcp"],"public-address":"10.224.139.188"}}},"ubuntu/2":{"workload-status":{"current":"active","since":"07 Apr 2023 14:41:11+08:00"},"juju-status":{"current":"idle","since":"07 Apr 2023 14:41:13+08:00","version":"2.9.42"},"machine":"3","public-address":"10.224.139.181","subordinates":{"nrpe/1":{"workload-status":{"current":"active","message":"Ready","since":"07 Apr 2023 14:46:56+08:00"},"juju-status":{"current":"idle","since":"07 Apr 2023 14:41:41+08:00","version":"2.9.42"},"open-ports":["icmp","5666/tcp"],"public-address":"10.224.139.181"}}}}}},"storage":{"storage":{"files/0":{"kind":"filesystem","life":"alive","status":{"current":"attached","since":"06 Apr 2023 11:34:01+08:00"},"persistent":false,"attachments":{"units":{"ubuntu/0":{"machine":"0","location":"/srv/data","life":"alive"}}}},"files/1":{"kind":"filesystem","life":"alive","status":{"current":"attached","since":"07 Apr 2023 14:41:11+08:00"},"persistent":false,"attachments":{"units":{"ubuntu/1":{"machine":"2","location":"/srv/data","life":"alive"}}}},"files/2":{"kind":"filesystem","life":"alive","status":{"current":"attached","since":"07 Apr 2023 
14:41:10+08:00"},"persistent":false,"attachments":{"units":{"ubuntu/2":{"machine":"3","location":"/srv/data","life":"alive"}}}}},"filesystems":{"0/0":{"provider-id":"0/0","storage":"files/0","Attachments":{"machines":{"0":{"mount-point":"/srv/data","read-only":false,"life":"alive"}},"units":{"ubuntu/0":{"machine":"0","location":"/srv/data","life":"alive"}}},"pool":"rootfs","size":12464,"life":"alive","status":{"current":"attached","since":"06 Apr 2023 11:34:01+08:00"}},"2/1":{"provider-id":"2/1","storage":"files/1","Attachments":{"machines":{"2":{"mount-point":"/srv/data","read-only":false,"life":"alive"}},"units":{"ubuntu/1":{"machine":"2","location":"/srv/data","life":"alive"}}},"pool":"rootfs","size":10062,"life":"alive","status":{"current":"attached","since":"07 Apr 2023 14:41:11+08:00"}},"3/2":{"provider-id":"3/2","storage":"files/2","Attachments":{"machines":{"3":{"mount-point":"/srv/data","read-only":false,"life":"alive"}},"units":{"ubuntu/2":{"machine":"3","location":"/srv/data","life":"alive"}}},"pool":"rootfs","size":10113,"life":"alive","status":{"current":"attached","since":"07 Apr 2023 14:41:10+08:00"}}}},"controller":{"timestamp":"17:04:14+08:00"}} diff -Nru sosreport-4.4/tests/unittests/juju/__init__.py sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/juju/__init__.py --- sosreport-4.4/tests/unittests/juju/__init__.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/juju/__init__.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1 @@ +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/tests/unittests/juju/juju_cluster_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/juju/juju_cluster_tests.py --- sosreport-4.4/tests/unittests/juju/juju_cluster_tests.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/juju/juju_cluster_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,324 @@ +# Copyright (c) 2023 Canonical Ltd., Chi Wai Chan + +# This file is part of the sos project: 
https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. +import json +import pathlib +import unittest +from unittest.mock import call, patch + +from sos.collector.clusters.juju import _parse_option_string, juju, _get_index +from sos.options import ClusterOption + + +class MockOptions: + + def __init__(self): + self.cluster_options = [] + + +def get_juju_output(model): + dir = pathlib.Path(__file__).parent.resolve() + with open(dir / "data" / f"juju_output_{model}.json") as f: + return f.read() + + +def get_juju_status(cmd): + if "-m" in cmd: + model = cmd.split()[3] + else: + model = "sos" + + return { + "status": 0, + "output": get_juju_output(model), + } + + +def test_parse_option_string(): + result = _parse_option_string(" a,b,c") + assert result == ["a", "b", "c"] + + result = _parse_option_string() + assert result == [] + + +class JujuTest(unittest.TestCase): + """Test for juju cluster.""" + + @patch( + "sos.collector.clusters.juju.juju.exec_primary_cmd", + side_effect=get_juju_status, + ) + def test_get_nodes_no_filter(self, mock_exec_primary_cmd): + """No filter.""" + mock_opts = MockOptions() + cluster = juju( + commons={ + "tmpdir": "/tmp", + "cmdlineopts": mock_opts, + } + ) + nodes = cluster.get_nodes() + assert nodes == [] + + @patch( + "sos.collector.clusters.juju.juju.exec_primary_cmd", + side_effect=get_juju_status, + ) + def test_get_nodes_app_filter(self, mock_exec_primary_cmd): + """Application filter.""" + mock_opts = MockOptions() + mock_opts.cluster_options.append( + ClusterOption( + name="apps", + opt_type=str, + value="ubuntu", + cluster=juju.__name__, + ) + ) + cluster = juju( + commons={ + "tmpdir": "/tmp", + "cmdlineopts": mock_opts, + } + ) + nodes = cluster.get_nodes() + nodes.sort() + 
assert nodes == [":0", ":2", ":3"] + mock_exec_primary_cmd.assert_called_once_with( + "juju status --format json" + ) + + @patch( + "sos.collector.clusters.juju.juju.exec_primary_cmd", + side_effect=get_juju_status, + ) + def test_get_nodes_app_regex_filter(self, mock_exec_primary_cmd): + """Application filter.""" + mock_opts = MockOptions() + mock_opts.cluster_options.append( + ClusterOption( + name="apps", + opt_type=str, + value="ubuntu|nginx", + cluster=juju.__name__, + ) + ) + cluster = juju( + commons={ + "tmpdir": "/tmp", + "cmdlineopts": mock_opts, + } + ) + nodes = cluster.get_nodes() + nodes.sort() + assert nodes == [":0", ":2", ":3", ":4"] + mock_exec_primary_cmd.assert_called_once_with( + "juju status --format json" + ) + + @patch( + "sos.collector.clusters.juju.juju.exec_primary_cmd", + side_effect=get_juju_status, + ) + def test_get_nodes_model_filter_multiple_models( + self, mock_exec_primary_cmd + ): + """Multiple model filter.""" + mock_opts = MockOptions() + mock_opts.cluster_options.append( + ClusterOption( + name="models", + opt_type=str, + value="sos,sos2", + cluster=juju.__name__, + ), + ) + mock_opts.cluster_options.append( + ClusterOption( + name="apps", + opt_type=str, + value="ubuntu", + cluster=juju.__name__, + ), + ) + cluster = juju( + commons={ + "tmpdir": "/tmp", + "cmdlineopts": mock_opts, + } + ) + nodes = cluster.get_nodes() + nodes.sort() + assert nodes == [ + "sos2:0", + "sos2:1", + "sos:0", + "sos:2", + "sos:3", + ] + mock_exec_primary_cmd.assert_has_calls( + [ + call("juju status -m sos --format json"), + call("juju status -m sos2 --format json"), + ] + ) + + @patch( + "sos.collector.clusters.juju.juju.exec_primary_cmd", + side_effect=get_juju_status, + ) + def test_get_nodes_model_filter(self, mock_exec_primary_cmd): + """Model filter.""" + mock_opts = MockOptions() + mock_opts.cluster_options.append( + ClusterOption( + name="models", + opt_type=str, + value="sos", + cluster=juju.__name__, + ) + ) + 
mock_opts.cluster_options.append( + ClusterOption( + name="apps", + opt_type=str, + value="ubuntu", + cluster=juju.__name__, + ), + ) + cluster = juju( + commons={ + "tmpdir": "/tmp", + "cmdlineopts": mock_opts, + } + ) + nodes = cluster.get_nodes() + nodes.sort() + assert nodes == [ + "sos:0", + "sos:2", + "sos:3", + ] + mock_exec_primary_cmd.assert_has_calls( + [ + call("juju status -m sos --format json"), + ] + ) + + @patch( + "sos.collector.clusters.juju.juju.exec_primary_cmd", + side_effect=get_juju_status, + ) + def test_get_nodes_unit_filter(self, mock_exec_primary_cmd): + """Node filter.""" + mock_opts = MockOptions() + mock_opts.cluster_options.append( + ClusterOption( + name="units", + opt_type=str, + value="ubuntu/0,ubuntu/1", + cluster=juju.__name__, + ) + ) + cluster = juju( + commons={ + "tmpdir": "/tmp", + "cmdlineopts": mock_opts, + } + ) + nodes = cluster.get_nodes() + nodes.sort() + assert nodes == [":0", ":2"] + + @patch( + "sos.collector.clusters.juju.juju.exec_primary_cmd", + side_effect=get_juju_status, + ) + def test_get_nodes_machine_filter(self, mock_exec_primary_cmd): + """Machine filter.""" + mock_opts = MockOptions() + mock_opts.cluster_options.append( + ClusterOption( + name="machines", + opt_type=str, + value="0,2", + cluster=juju.__name__, + ) + ) + cluster = juju( + commons={ + "tmpdir": "/tmp", + "cmdlineopts": mock_opts, + } + ) + nodes = cluster.get_nodes() + nodes.sort() + print(nodes) + assert nodes == [":0", ":2"] + + @patch( + "sos.collector.clusters.juju.juju.exec_primary_cmd", + side_effect=get_juju_status, + ) + def test_subordinates(self, mock_exec_primary_cmd): + """Subordinate filter.""" + mock_opts = MockOptions() + mock_opts.cluster_options.append( + ClusterOption( + name="apps", + opt_type=str, + value="nrpe", + cluster=juju.__name__, + ) + ) + cluster = juju( + commons={ + "tmpdir": "/tmp", + "cmdlineopts": mock_opts, + } + ) + nodes = cluster.get_nodes() + nodes.sort() + assert nodes == [":0", ":2", ":3"] + 
mock_exec_primary_cmd.assert_called_once_with( + "juju status --format json" + ) + + +class IndexTest(unittest.TestCase): + + def test_subordinate_parent_miss_units(self): + """Fix if subordinate's parent is missing units.""" + model = "sos" + index = _get_index(model_name=model) + + juju_status = json.loads(get_juju_output(model=model)) + juju_status["applications"]["ubuntu"].pop("units") + + # Ensure these commands won't fall even when + # subordinate's parent's units is missing. + index.add_principals(juju_status) + index.add_subordinates(juju_status) + + def test_subordinate_miss_parent(self): + """Fix if subordinate is missing parent.""" + model = "sos" + index = _get_index(model_name=model) + + juju_status = json.loads(get_juju_output(model=model)) + index.add_principals(juju_status) + + index.apps.pop("ubuntu") + # Ensure command won't fall even when + # subordinate's parent is missing + index.add_subordinates(juju_status) + + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/tests/unittests/juju/juju_transports_test.py sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/juju/juju_transports_test.py --- sosreport-4.4/tests/unittests/juju/juju_transports_test.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/juju/juju_transports_test.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,85 @@ +# Copyright (c) 2023 Canonical Ltd., Chi Wai Chan + +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +import subprocess +import unittest +from unittest.mock import patch + +from sos.collector.exceptions import JujuNotInstalledException +from sos.collector.transports.juju import JujuSSH + + +class MockCmdLineOpts(object): + ssh_user = "user_abc" + sudo_pw = "pw_abc" + root_password = "root_pw_abc" + + +class JujuSSHTest(unittest.TestCase): + def setUp(self): + self.juju_ssh = JujuSSH( + commons={ + "cmdlineopts": MockCmdLineOpts, + "tmpdir": "/tmp/sos-juju/", + "need_sudo": False, + }, + address="model_abc:unit_abc", + ) + + @patch("sos.collector.transports.juju.subprocess.check_output") + def test_check_juju_installed_err(self, mock_subprocess_check_output): + """Raise error if juju is not installed.""" + mock_subprocess_check_output.side_effect = ( + subprocess.CalledProcessError(returncode="127", cmd="cmd_abc") + ) + with self.assertRaises(JujuNotInstalledException): + self.juju_ssh._check_juju_installed() + + @patch("sos.collector.transports.juju.subprocess.check_output") + def test_check_juju_installed_true(self, mock_subprocess_check_output): + """Return True if juju is installed.""" + result = self.juju_ssh._check_juju_installed() + assert result + + @patch("sos.collector.transports.juju.subprocess.check_output") + def test_chmod(self, mock_subprocess_check_output): + self.juju_ssh._chmod(fname="file_abc") + mock_subprocess_check_output.assert_called_with( + f"{self.juju_ssh.remote_exec} sudo chmod o+r file_abc", + stderr=subprocess.STDOUT, + shell=True, + ) + + @patch( + "sos.collector.transports.juju.JujuSSH._check_juju_installed", + return_value=True, + ) + def test_connect(self, mock_result): + self.juju_ssh.connect(password=None) + assert self.juju_ssh.connected + + def test_remote_exec(self): + assert ( + self.juju_ssh.remote_exec == "juju ssh -m model_abc unit_abc" + ) + + @patch( + "sos.collector.transports.juju.sos_get_command_output", + return_value={"status": 0}, + ) + @patch("sos.collector.transports.juju.JujuSSH._chmod", return_value=True) + 
def test_retrieve_file(self, mock_chmod, mock_sos_get_cmd_output): + self.juju_ssh._retrieve_file(fname="file_abc", dest="/tmp/sos-juju/") + mock_sos_get_cmd_output.assert_called_with( + "juju scp -m model_abc -- -r unit_abc:file_abc /tmp/sos-juju/" + ) + + +# vim: set et ts=4 sw=4 : diff -Nru sosreport-4.4/tests/unittests/plugin_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/plugin_tests.py --- sosreport-4.4/tests/unittests/plugin_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/plugin_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -296,7 +296,7 @@ }) p.archive = MockArchive() p.setup() - p.collect() + p.collect_plugin() self.assertEquals(p.archive.m, {}) def test_postproc_default_on(self): @@ -358,10 +358,10 @@ self.mp.sysroot = '/' fn = create_file(2) # create 2MB file, consider a context manager self.mp.add_copy_spec(fn, 1) - content, fname, _tags = self.mp.copy_strings[0] - self.assertTrue("tailed" in fname) + fname, _size = self.mp._tail_files_list[0] + self.assertTrue(fname == fn) self.assertTrue("tmp" in fname) - self.assertEquals(1024 * 1024, len(content)) + self.assertEquals(1024 * 1024, _size) os.unlink(fn) def test_bad_filename(self): @@ -388,10 +388,9 @@ create_file(2, dir=tmpdir) create_file(2, dir=tmpdir) self.mp.add_copy_spec(tmpdir + "/*", 1) - self.assertEquals(len(self.mp.copy_strings), 1) - content, fname, _tags = self.mp.copy_strings[0] - self.assertTrue("tailed" in fname) - self.assertEquals(1024 * 1024, len(content)) + self.assertEquals(len(self.mp._tail_files_list), 1) + fname, _size = self.mp._tail_files_list[0] + self.assertEquals(1024 * 1024, _size) shutil.rmtree(tmpdir) def test_multiple_files_no_limit(self): @@ -450,7 +449,7 @@ def test_no_replacements(self): self.mp.sysroot = '/' self.mp.add_copy_spec(j("tail_test.txt")) - self.mp.collect() + self.mp.collect_plugin() replacements = self.mp.do_file_sub( j("tail_test.txt"), r"wont_match", "foobar") self.assertEquals(0, 
replacements) @@ -459,7 +458,7 @@ # test uses absolute paths self.mp.sysroot = '/' self.mp.add_copy_spec(j("tail_test.txt")) - self.mp.collect() + self.mp.collect_plugin() replacements = self.mp.do_file_sub( j("tail_test.txt"), r"(tail)", "foobar") self.assertEquals(1, replacements) diff -Nru sosreport-4.4/tests/unittests/policy_tests.py sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/policy_tests.py --- sosreport-4.4/tests/unittests/policy_tests.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/unittests/policy_tests.py 2023-05-26 22:32:49.000000000 +0530 @@ -7,9 +7,13 @@ # See the LICENSE file in the source distribution for further information. import unittest +from avocado.utils import distro + from sos.policies import Policy, import_policy from sos.policies.distros import LinuxPolicy -from sos.policies.package_managers import PackageManager +from sos.policies.package_managers import PackageManager, MultiPackageManager +from sos.policies.package_managers.rpm import RpmPackageManager +from sos.policies.package_managers.dpkg import DpkgPackageManager from sos.report.plugins import (Plugin, IndependentPlugin, RedHatPlugin, DebianPlugin) @@ -97,6 +101,60 @@ self.assertEquals(self.pm.pkg_by_name('foo'), None) +class RpmPackageManagerTests(unittest.TestCase): + + def setUp(self): + if distro.detect().name not in ['fedora', 'centos', 'rhel']: + self.skipTest('Not running on an RPM distribution') + self.pm = RpmPackageManager() + + def test_load_all_packages(self): + self.assertNotEquals(self.pm.packages, {}) + + def test_pkg_is_formatted(self): + kpkg = self.pm.pkg_by_name('coreutils') + self.assertIsInstance(kpkg, dict) + self.assertIsInstance(kpkg['version'], list) + self.assertEquals(kpkg['pkg_manager'], 'rpm') + + +class DpkgPackageManagerTests(unittest.TestCase): + + def setUp(self): + if distro.detect().name not in ['Ubuntu', 'debian']: + self.skipTest('Not running on a dpkg distribution') + self.pm = DpkgPackageManager() + + def 
test_load_all_packages(self): + self.assertNotEquals(self.pm.packages, {}) + + def test_pkg_is_formatted(self): + kpkg = self.pm.pkg_by_name('coreutils') + self.assertIsInstance(kpkg, dict) + self.assertIsInstance(kpkg['version'], list) + self.assertEquals(kpkg['pkg_manager'], 'dpkg') + + +class MultiPackageManagerTests(unittest.TestCase): + + def setUp(self): + self.pm = MultiPackageManager(primary=RpmPackageManager, + fallbacks=[DpkgPackageManager]) + + def test_load_all_packages(self): + self.assertNotEquals(self.pm.packages, {}) + + def test_pkg_is_formatted(self): + kpkg = self.pm.pkg_by_name('coreutils') + self.assertIsInstance(kpkg, dict) + self.assertIsInstance(kpkg['version'], list) + _local = distro.detect().name + if _local in ['Ubuntu', 'debian']: + self.assertEquals(kpkg['pkg_manager'], 'dpkg') + else: + self.assertEquals(kpkg['pkg_manager'], 'rpm') + + if __name__ == "__main__": unittest.main() diff -Nru sosreport-4.4/tests/vendor_tests/redhat/rhbz1950350/clean_config_test.txt sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz1950350/clean_config_test.txt --- sosreport-4.4/tests/vendor_tests/redhat/rhbz1950350/clean_config_test.txt 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz1950350/clean_config_test.txt 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,10 @@ +This is a test file for use with testing sos. + +The clean_config test should use this file for testing that the config section for +[clean] is loaded when `--clean` is specified on the command line. + +This line contains 'shibboleth' which should be scrubbed. + +The domain sosexample.com should also be scrubbed. Also subdomains like foobar.sosexample.com should be removed. + +The domain example.com should not be removed. 
diff -Nru sosreport-4.4/tests/vendor_tests/redhat/rhbz1950350/rhbz1950350.py sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz1950350/rhbz1950350.py --- sosreport-4.4/tests/vendor_tests/redhat/rhbz1950350/rhbz1950350.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz1950350/rhbz1950350.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,38 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + + +from sos_tests import StageTwoReportTest + + +class rhbz1950350(StageTwoReportTest): + """Ensure that when `--clean` is used with report that the config settings + from sos.conf under the [clean] section are loaded as well + + :avocado: tags=stagetwo + """ + + files = [ + ('sos.conf', '/etc/sos/sos.conf'), + ('sos_clean_config.conf', '/etc/sos/extras.d/sos_clean_config.conf'), + ('clean_config_test.txt', '/var/log/clean_config_test.txt') + ] + + sos_cmd = '-v -o sos_extras --clean' + + def test_clean_config_loaded(self): + self.assertSosLogContains("effective options now: (.*)? --clean --domains (.*)? --keywords (.*)?") + + def test_clean_config_performed(self): + self.assertFileCollected('var/log/clean_config_test.txt') + self.assertFileHasContent('var/log/clean_config_test.txt', 'The domain example.com should not be removed.') + self.assertFileNotHasContent( + 'var/log/clean_config_test.txt', + "This line contains 'shibboleth' which should be scrubbed." 
+ ) + self.assertFileNotHasContent('var/log/clean_config_test.txt', 'sosexample.com') diff -Nru sosreport-4.4/tests/vendor_tests/redhat/rhbz1950350/sos_clean_config.conf sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz1950350/sos_clean_config.conf --- sosreport-4.4/tests/vendor_tests/redhat/rhbz1950350/sos_clean_config.conf 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz1950350/sos_clean_config.conf 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,3 @@ +# sos_extras config file to assist with the clean_config test. + +:/var/log/clean_config_test.txt diff -Nru sosreport-4.4/tests/vendor_tests/redhat/rhbz1950350/sos.conf sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz1950350/sos.conf --- sosreport-4.4/tests/vendor_tests/redhat/rhbz1950350/sos.conf 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz1950350/sos.conf 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,16 @@ +[global] +#verbose = 3 + +[report] +#skip-plugins = rpm,selinux,dovecot + +[collect] +#primary = myhost.example.com + +[clean] +keywords = shibboleth +domains = sosexample.com +no-update = true + +[plugin_options] +#rpm.rpmva = off diff -Nru sosreport-4.4/tests/vendor_tests/redhat/rhbz1950350.py sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz1950350.py --- sosreport-4.4/tests/vendor_tests/redhat/rhbz1950350.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz1950350.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,35 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. 
- - -from sos_tests import StageTwoReportTest - - -class rhbz1950350(StageTwoReportTest): - """Ensure that when `--clean` is used with report that the config settings - from sos.conf under the [clean] section are loaded as well - - :avocado: tags=stagetwo - """ - - files = ['/etc/sos/sos.conf', '/etc/sos/extras.d/sos_clean_config.conf', - '/var/log/clean_config_test.txt'] - - sos_cmd = '-v -o sos_extras --clean' - - def test_clean_config_loaded(self): - self.assertSosLogContains("effective options now: (.*)? --clean --domains (.*)? --keywords (.*)?") - - def test_clean_config_performed(self): - self.assertFileCollected('var/log/clean_config_test.txt') - self.assertFileHasContent('var/log/clean_config_test.txt', 'The domain example.com should not be removed.') - self.assertFileNotHasContent( - 'var/log/clean_config_test.txt', - "This line contains 'shibboleth' which should be scrubbed." - ) - self.assertFileNotHasContent('var/log/clean_config_test.txt', 'sosexample.com') diff -Nru sosreport-4.4/tests/vendor_tests/redhat/rhbz2018033/rhbz2018033.py sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz2018033/rhbz2018033.py --- sosreport-4.4/tests/vendor_tests/redhat/rhbz2018033/rhbz2018033.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz2018033/rhbz2018033.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,35 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. + +from sos_tests import StageTwoReportTest + + +class rhbz2018033(StageTwoReportTest): + """Test that control of plugin timeouts is independent of other plugin + timeouts. See #2744. 
+ + https://bugzilla.redhat.com/show_bug.cgi?id=2018033 + + :avocado: tags=stagetwo + """ + + install_plugins = ['timeout_test'] + sos_cmd = '-vvv -o timeout_test,networking -k timeout_test.timeout=1 --plugin-timeout=123' + + def test_timeouts_separate(self): + self.assertSosUILogContains('Plugin timeout_test timed out') + self.assertSosUILogNotContains('Plugin networking timed out') + + def test_timeout_manifest_recorded(self): + testm = self.get_plugin_manifest('timeout_test') + self.assertTrue(testm['timeout_hit']) + self.assertTrue(testm['timeout'] == 1) + + netm = self.get_plugin_manifest('networking') + self.assertFalse(netm['timeout_hit']) + self.assertTrue(netm['timeout'] == 123) diff -Nru sosreport-4.4/tests/vendor_tests/redhat/rhbz2018033/timeout_test.py sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz2018033/timeout_test.py --- sosreport-4.4/tests/vendor_tests/redhat/rhbz2018033/timeout_test.py 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz2018033/timeout_test.py 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,22 @@ +# This file is part of the sos project: https://github.com/sosreport/sos +# +# This copyrighted material is made available to anyone wishing to use, +# modify, copy, or redistribute it subject to the terms and conditions of +# version 2 of the GNU General Public License. +# +# See the LICENSE file in the source distribution for further information. 
+ +from sos.report.plugins import Plugin, IndependentPlugin + + +class TimeoutTest(Plugin, IndependentPlugin): + + plugin_name = 'timeout_test' + short_desc = 'Tests timeout functionality in test suite' + plugin_timeout = 100 + + + def setup(self): + self.add_cmd_output('sleep 15') + self.add_cmd_output('echo I slept great', suggest_filename='echo_good') + self.add_cmd_output('sleep 30', timeout=10) diff -Nru sosreport-4.4/tests/vendor_tests/redhat/rhbz2018033.py sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz2018033.py --- sosreport-4.4/tests/vendor_tests/redhat/rhbz2018033.py 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tests/vendor_tests/redhat/rhbz2018033.py 1970-01-01 05:30:00.000000000 +0530 @@ -1,35 +0,0 @@ -# This file is part of the sos project: https://github.com/sosreport/sos -# -# This copyrighted material is made available to anyone wishing to use, -# modify, copy, or redistribute it subject to the terms and conditions of -# version 2 of the GNU General Public License. -# -# See the LICENSE file in the source distribution for further information. - -from sos_tests import StageTwoReportTest - - -class rhbz2018033(StageTwoReportTest): - """Test that control of plugin timeouts is independent of other plugin - timeouts. See #2744. 
- - https://bugzilla.redhat.com/show_bug.cgi?id=2018033 - - :avocado: tags=stagetwo - """ - - install_plugins = ['timeout_test'] - sos_cmd = '-vvv -o timeout_test,networking -k timeout_test.timeout=1 --plugin-timeout=123' - - def test_timeouts_separate(self): - self.assertSosUILogContains('Plugin timeout_test timed out') - self.assertSosUILogNotContains('Plugin networking timed out') - - def test_timeout_manifest_recorded(self): - testm = self.get_plugin_manifest('timeout_test') - self.assertTrue(testm['timeout_hit']) - self.assertTrue(testm['timeout'] == 1) - - netm = self.get_plugin_manifest('networking') - self.assertFalse(netm['timeout_hit']) - self.assertTrue(netm['timeout'] == 123) diff -Nru sosreport-4.4/tmpfiles/tmpfilesd-sos.conf sosreport-4.5.4ubuntu0.20.04.1/tmpfiles/tmpfilesd-sos.conf --- sosreport-4.4/tmpfiles/tmpfilesd-sos.conf 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tmpfiles/tmpfilesd-sos.conf 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,4 @@ +# Cleaning the contents of extracted sosreport directories. +# +# Type Path Mode User Group Age Argument +e /var/tmp/sosreport* - - - 30d diff -Nru sosreport-4.4/tmpfiles/tmpfilesd-sos-rh.conf sosreport-4.5.4ubuntu0.20.04.1/tmpfiles/tmpfilesd-sos-rh.conf --- sosreport-4.4/tmpfiles/tmpfilesd-sos-rh.conf 1970-01-01 05:30:00.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tmpfiles/tmpfilesd-sos-rh.conf 2023-05-26 22:32:49.000000000 +0530 @@ -0,0 +1,4 @@ +# Ignore the contents of extracted sosreport directories. +# +# Type Path Mode User Group Age Argument +x /var/tmp/sos* diff -Nru sosreport-4.4/tmpfilesd-sos.conf sosreport-4.5.4ubuntu0.20.04.1/tmpfilesd-sos.conf --- sosreport-4.4/tmpfilesd-sos.conf 2022-08-16 01:37:50.000000000 +0530 +++ sosreport-4.5.4ubuntu0.20.04.1/tmpfilesd-sos.conf 1970-01-01 05:30:00.000000000 +0530 @@ -1,4 +0,0 @@ -# Cleaning the contents of extracted sosreport directories. -# -# Type Path Mode User Group Age Argument -e /var/tmp/sosreport* - - - 30d