From 0d6d139d72ad14634c20839754a6991f7aa6aaf8 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 18 Aug 2025 17:31:14 +0200 Subject: [PATCH 1/6] Checks: Fix coverage upload for unit tests on given Python version --- .github/workflows/main.yml | 95 ++++++++------------------------------ pytype_runner.py | 4 ++ tox.ini | 6 ++- 3 files changed, 27 insertions(+), 78 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 90403a09..ff30a9fa 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -35,6 +35,8 @@ jobs: - python-version: '3.13' os: ubuntu-22.04 runs-on: ${{ matrix.os }} + env: + COV_UPLOAD: ${{ '3.11' }} steps: - uses: actions/checkout@v4 with: @@ -52,93 +54,34 @@ jobs: if: ${{ github.actor == 'nektos/act'}} run: apt-get update && apt-get install -y cpio - - name: Run of tox on ubuntu-latest - if: ${{ startsWith(matrix.python-version, '3.') && matrix.python-version != 3.6 }} + - name: Run tox to run pytest in the defined tox environments run: | - pip install 'virtualenv<20.22' 'tox==4.5.1' tox-gh-actions + pip install tox-gh-actions tox --workdir .github/workflows/.tox --recreate - - name: Select the coverage file for upload - if: | - ( matrix.python-version == '3.6' || matrix.python-version == '3.11' ) && - ( !cancelled() && github.actor != 'nektos/act' ) - id: coverage - run: mv $( ls -t .github/workflows/.tox/*/log/.coverage | head -1 ) .coverage - - # The new reliable Codecov upload requires Codecov to query the GitHub API to check - # the repo and the commit. The repo (or organisation) owner needs to login to - # codecov, generated the CODECOV_TOKEN and save it as a secret in the ORG or the repo: - # https://docs.codecov.com/docs/adding-the-codecov-token - - # Links to get and set the token: - # Get the CODECOV_TOKEN: https://app.codecov.io/gh/xenserver/python-libs/settings - # Set the CODE_COV_TOKEN: https://github.com/xenserver/python-libs/settings/secrets/actions - - # Without it, the API calls are rate-limited by GitHub, and the upload may fail: - # https://github.com/codecov/feedback/issues/126#issuecomment-1932658904 - # - - name: Upload coverage reports to Codecov (fallback, legacy Node.js 16 action) - # If CODECOV_TOKEN is not set, use the legacy tokenless Codecov action: + - name: Upload coverage reports to Codecov env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - # To reduce chances of GitHub's API throttling to hit this upload, only run the - # upload for the py38-covcombine-check job running on Ubuntu-20.04, which is the - # one we need. And only run it for PRs and the master branch, not for pushes. - # This reduces the number of uploads and the chance of hitting the rate limit - # by a factor of 6. + id: codecov if: | - steps.coverage.outcome == 'success' && - !env.CODECOV_TOKEN && !cancelled() && - matrix.os == 'ubuntu-20.04' && github.actor != 'nektos/act' && - ( github.event.pull_request.number || github.ref == 'refs/heads/master' ) - uses: codecov/codecov-action@v3 + env.CODECOV_TOKEN && !cancelled() && github.actor != 'nektos/act' && + matrix.python-version == env.COV_UPLOAD + uses: codecov/codecov-action@v5 with: - directory: .github/workflows/.tox/py38-covcombine-check/log - env_vars: OS,PYTHON - # Use fail_ci_if_error: false as explained the big comment above: - # Not failing this job in this case is ok because the tox CI checks also contain - # a diff-cover check which would fail on changed lines missing coverage. 
- # The Codecov CLI is more reliable and should be used if the CODECOV_TOKEN is set. - # The Codecov CLI is used in the next step when CODECOV_TOKEN is set. - fail_ci_if_error: false - flags: unittest - name: py27-py38-combined - verbose: true + token: ${{ secrets.CODECOV_TOKEN }} - - name: Upload coverage reports to Codecov (used when secrets.CODECOV_TOKEN is set) - # If CODECOV_TOKEN is set, use the new Codecov CLI to upload the coverage reports - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - if: | - env.CODECOV_TOKEN && !cancelled() && github.actor != 'nektos/act' && - steps.coverage.outcome == 'success' && matrix.os == 'ubuntu-20.04' - run: > - set -euxv; - mv .github/workflows/.tox/py38-covcombine-check/log/coverage.xml cov.xml; - curl -O https://cli.codecov.io/latest/linux/codecov; sudo chmod +x codecov; - ./codecov upload-process --report-type coverage - --name "CLI Upload for ${{ env.PYTHON_VERSION }}" - --git-service github --fail-on-error --file cov.xml --disable-search - --flag python${{ env.PYTHON_VERSION }} - continue-on-error: false # Fail the job if the upload with CODECOV_TOKEN fails + - uses: codecov/test-results-action@v1 + if: ${{ !cancelled() && steps.codecov.outcome == 'success' }} + with: + token: ${{ secrets.CODECOV_TOKEN }} - - if: steps.coverage.outcome == 'success' + - if: | + matrix.python-version == env.COV_UPLOAD && + github.actor != 'nektos/act' && + !cancelled() name: Upload coverage reports to Coveralls env: - COVERALLS_PARALLEL: true COVERALLS_FLAG_NAME: ${{ format('python{0}', steps.python.outputs.python-version ) }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: pip install coveralls && coveralls --service=github - - # For combined coverage of 2.7, 3.8 and 3.11 we upload to Coveralls in parallel mode. - # To view the Coveralls results from the PR, click on the "Details" link to the right - # of the Coveralls Logo in the Checks section of the PR. - finish-coverage-upload: - if: github.actor != 'nektos/act' - needs: test - runs-on: ubuntu-latest - steps: - - name: Finish the coverage upload to Coveralls - uses: coverallsapp/github-action@v1 - with: - parallel-finished: true + continue-on-error: true # Coveralls.io is currently overloaded diff --git a/pytype_runner.py b/pytype_runner.py index 9625736f..ea083d25 100755 --- a/pytype_runner.py +++ b/pytype_runner.py @@ -65,6 +65,8 @@ def skip_uninteresting_lines(line: str) -> bool: def run_pytype(command: List[str], branch_url: str, errorlog: TextIO, results): + if os.environ.get("GITHUB_STEP_SUMMARY", None): + print("::group::pytype-output") info(" ".join(shlex.quote(arg) for arg in command)) # When run in tox, pytype dumps debug messages to stderr. 
Point stderr to /dev/null:
     popen = Popen(command, stdout=PIPE, stderr=PIPE, universal_newlines=True)
@@ -108,6 +110,8 @@ def run_pytype(command: List[str], branch_url: str, errorlog: TextIO, results):
     if popen.stdout:
         popen.stdout.close()
     popen.wait()
+    if os.environ.get("GITHUB_STEP_SUMMARY", None):
+        print("::endgroup::")
     return popen.returncode, results


diff --git a/tox.ini b/tox.ini
index 535d80ec..cf9abbe1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -33,7 +33,9 @@ commands =
     # https://github.com/actions/toolkit/blob/main/docs/problem-matchers.md
     # https://github.com/actions/toolkit/blob/main/docs/commands.md#problem-matchers
     echo "::add-matcher::.github/workflows/PYTHONWARNINGS-problemMatcher.json"
-    pytest --cov -v --new-first -x --show-capture=all -rA
+    sh -c 'if [ -n "{env:GITHUB_STEP_SUMMARY:}" ];then echo "::group::pytest";fi'
+    pytest --cov -v --new-first -x --show-capture=all -rA --junitxml={envlogdir}/junit.xml -o junit_family=legacy
+    sh -c 'if [ -n "{env:GITHUB_STEP_SUMMARY:}" ];then echo "::endgroup::";fi'
     sh -c 'if [ -n "{env:PYTEST_MD_REPORT_OUTPUT}" -a -n "{env:GITHUB_STEP_SUMMARY}" ];then \
         mkdir -p $(dirname "{env:GITHUB_STEP_SUMMARY:.git/sum.md}"); \
         sed "s/tests\(.*py\)/[&](&)/" \
@@ -114,7 +116,7 @@ commands =
 # covcombine shall not call [cov]commands: diff-cover shall check the combined cov:
     {cov,covcp}: {[cov]commands}
     {py27-test}: pylint --py3k --disable=no-absolute-import xcp/
-    covcp: cp -av {envlogdir}/coverage.xml {env:UPLOAD_DIR:.}
+    covcp: cp -av {envlogdir}/coverage.xml {envlogdir}/junit.xml {env:UPLOAD_DIR:.}
     covcombine: {[covcombine]commands}
     fox: {[covcombine]commands}
     fox: {[lint]commands}

From e3adf0bab3d919390e573f05f98e5687e510d6a7 Mon Sep 17 00:00:00 2001
From: Bernhard Kaindl
Date: Mon, 18 Aug 2025 12:00:00 +0200
Subject: [PATCH 2/6] Add .github/workflows/reviewdog-review.yml

Signed-off-by: Bernhard Kaindl
---
 .github/workflows/reviewdog-review.yml | 62 ++++++++++++++++++++++++++
 1 file changed, 62 insertions(+)
 create mode 100644 .github/workflows/reviewdog-review.yml

diff --git a/.github/workflows/reviewdog-review.yml b/.github/workflows/reviewdog-review.yml
new file mode 100644
index 00000000..b110f4c0
--- /dev/null
+++ b/.github/workflows/reviewdog-review.yml
@@ -0,0 +1,62 @@
+name: Reviewdog PR Review comments
+
+#
+# The reviewdog steps use reporter: github-pr-review, which submits the results
+# as a review comment on the pull request. It needs a GitHub token with
+# public_repo scope to post the comments and can only be used in the context
+# of a pull request.
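+#
+# For a local preview without a token (illustrative only, not used by this
+# workflow; requires the reviewdog CLI to be installed), the same kind of
+# check can be run with the "local" reporter, e.g.:
+#
+#   pylint --output-format=text xcp/ \
+#     | reviewdog -efm="%f:%l:%c: %m" -reporter=local -diff="git diff master"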
+# +on: pull_request + +# +# Checks can be skipped by adding "skip-checks: true" to a commit message, +# or requested by adding "request-checks: true" if disabled by default for pushes: +# https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/collaborating-on-repositories-with-code-quality-features/about-status-checks#skipping-and-requesting-checks-for-individual-commits +# + +concurrency: # On new workflow, cancel old workflows from the same PR, branch or tag: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + reviewdog: + runs-on: ubuntu-24.04 + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.REVIEWDOG_GITHUB_API_TOKEN }} + GITHUB_ACTOR: ${{ github.actor }} + steps: + - uses: actions/checkout@v4 + id: checkout + if: env.REVIEWDOG_GITHUB_API_TOKEN && env.GITHUB_ACTOR != 'nektos/act' + + - uses: actions/setup-python@v5 + if: env.REVIEWDOG_GITHUB_API_TOKEN && env.GITHUB_ACTOR != 'nektos/act' + with: + python-version: 3.13 + + - name: Install uv and activate the environment + if: env.REVIEWDOG_GITHUB_API_TOKEN && env.GITHUB_ACTOR != 'nektos/act' + uses: astral-sh/setup-uv@v6 + with: + activate-environment: true + + - run: uv pip install pylint types-setuptools -r pyproject.toml --extra mypy + if: env.REVIEWDOG_GITHUB_API_TOKEN && env.GITHUB_ACTOR != 'nektos/act' + + - uses: tsuyoshicho/action-mypy@v4 + name: Run mypy with reviewdog to submit GitHub checks for warnings + if: env.REVIEWDOG_GITHUB_API_TOKEN && env.GITHUB_ACTOR != 'nektos/act' + with: + install_types: false + mypy_flags: --exclude python-libs-*/stubs/ + reporter: github-pr-review + level: warning + github_token: ${{ secrets.REVIEWDOG_GITHUB_API_TOKEN }} + + - uses: dciborow/action-pylint@0.1.0 + name: Run pylint with reviewdog to submit GitHub checks for warnings + if: env.REVIEWDOG_GITHUB_API_TOKEN && env.GITHUB_ACTOR != 'nektos/act' + with: + reporter: github-pr-review + glob_pattern: "xcp tests" + github_token: ${{ secrets.REVIEWDOG_GITHUB_API_TOKEN }} From eaecfc5dcaad2c4415414ad1f5bb007862cbbaed Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 18 Aug 2025 12:00:00 +0200 Subject: [PATCH 3/6] Let reviewdog report uncovered lines Signed-off-by: Bernhard Kaindl --- .github/workflows/main.yml | 14 ++++++++++++++ README.md | 2 +- tox.ini | 14 +++++++++----- 3 files changed, 24 insertions(+), 6 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ff30a9fa..6f3121e1 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -58,6 +58,20 @@ jobs: run: | pip install tox-gh-actions tox --workdir .github/workflows/.tox --recreate + env: + DIFF_COVERAGE_MIN: 0 # Let the reviewdog and codecov show uncovered lines + + - uses: aki77/reviewdog-action-code-coverage@v2 + continue-on-error: true + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.REVIEWDOG_GITHUB_API_TOKEN }} + if: + env.REVIEWDOG_GITHUB_API_TOKEN && + matrix.python-version == env.COV_UPLOAD && + github.actor != 'nektos/act' + with: + lcov_path: coverage.lcov + github_token: ${{ secrets.REVIEWDOG_GITHUB_API_TOKEN }} - name: Upload coverage reports to Codecov env: diff --git a/README.md b/README.md index b7e5c133..06aa384a 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Common XenServer/XCP-ng Python classes [![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit)](https://github.com/pre-commit/pre-commit) 
-[![](https://img.shields.io/badge/python-2.7_%7C_3.6_%7C_3.7_%7C_3.8_%7C_3.9_%7C_3.10_%7C_3.11+-blue.svg)](https://www.python.org/downloads/) +[![](https://img.shields.io/badge/python-3.6_%7C_3.10_%7C_3.11_%7C_3.12_%7C_3.13-blue.svg)](https://www.python.org/downloads/) [![codecov](https://codecov.io/gh/xenserver/python-libs/branch/master/graph/badge.svg?token=6WKVLDXJFN)](https://codecov.io/gh/xenserver/python-libs) [![](https://img.shields.io/badge/License-BSD--2--Cause%20%26%20MIT-brightgreen)](https://github.com/xenserver/python-libs/blob/master/LICENSE) diff --git a/tox.ini b/tox.ini index cf9abbe1..f2c11e68 100644 --- a/tox.ini +++ b/tox.ini @@ -83,6 +83,9 @@ passenv = pytype: GITHUB_REF_NAME test: PYTEST_ADDOPTS test: PYTEST_XDIST_WORKER_COUNT + cov: TESTS_COVERAGE_MIN + cov: XCP_COVERAGE_MIN + cov: DIFF_COVERAGE_MIN covcp: UPLOAD_DIR covcp: HOME check: MYPY_FORCE_COLOR @@ -116,7 +119,7 @@ commands = # covcombine shall not call [cov]commands: diff-cover shall check the combined cov: {cov,covcp}: {[cov]commands} {py27-test}: pylint --py3k --disable=no-absolute-import xcp/ - covcp: cp -av {envlogdir}/coverage.xml {envlogdir}/junit.xml {env:UPLOAD_DIR:.} + covcp: cp -av {envlogdir}/coverage.xml {envlogdir}/coverage.lcov {envlogdir}/junit.xml {env:UPLOAD_DIR:.} covcombine: {[covcombine]commands} fox: {[covcombine]commands} fox: {[lint]commands} @@ -129,12 +132,13 @@ setenv = PY3_DIFFCOVER_OPTIONS=--ignore-whitespace --show-uncovered extras = coverage test commands = - coverage xml -o {envlogdir}/coverage.xml --fail-under {env:XCP_COV_MIN:68} + coverage xml -o {envlogdir}/coverage.xml --fail-under {env:XCP_COVERAGE_MIN:78} + coverage lcov -o {envlogdir}/coverage.lcov coverage html -d {envlogdir}/htmlcov - coverage html -d {envlogdir}/htmlcov-tests --fail-under {env:TESTS_COV_MIN:96} \ + coverage html -d {envlogdir}/htmlcov-tests --fail-under {env:TESTS_COVERAGE_MIN:96} \ --include="tests/*" - diff-cover --compare-branch=origin/master \ - {env:PY3_DIFFCOVER_OPTIONS} --fail-under {env:DIFF_COV_MIN:92} \ + diff-cover --compare-branch=origin/master --exclude xcp/dmv.py \ + {env:PY3_DIFFCOVER_OPTIONS} --fail-under {env:DIFF_COVERAGE_MIN:92} \ --html-report {envlogdir}/coverage-diff.html \ {envlogdir}/coverage.xml From 350c62ceb79789526bbb7f63d8e462363a27628c Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 18 Aug 2025 12:00:00 +0200 Subject: [PATCH 4/6] Add configuration for codecov.io Signed-off-by: Bernhard Kaindl --- .codecov.yml | 108 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 108 insertions(+) create mode 100644 .codecov.yml diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 00000000..0319a5b2 --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,108 @@ +# For more configuration details: +# https://docs.codecov.io/docs/codecov-yaml + +# After making edits, check if this file is valid by running: +# curl -X POST --data-binary @.codecov.yml https://codecov.io/validate + +# +# Coverage configuration +# ---------------------- +# +github_checks: + # + # On adding coverage annotations to the code in the GitHub + # Code Review for now: + # + # - The annotations consume a lot of space in the PR code review, + # and can make it hard to review files that are not covered yet. + # + # - The coverage can be visited using the Codecov link at all times. 
+ # https://app.codecov.io/gh/xapi-project/xen-api/pulls + # + # - The annotations can be hidden in GitHub PR code review by + # pressing the "a" key or by deselecting the "Show comments" + # checkbox but they are shown by default. + # + # - The Codecov Chrome and Firefox extension is a much nicer + # way to indicate coverage: + # + # Link: https://github.com/codecov/codecov-browser-extension + # + # - How to enable: You need to log in to Codecov using Github. + # For Firefox, enable the needed permissions: + # https://github.com/codecov/codecov-browser-extension/issues/50 + # + # Reference: + # http://docs.codecov.com/docs/common-recipe-list#disable-github-check-run-annotations + # + annotations: true + +# +# Pull request comments: +# ---------------------- +# This feature adds the code coverage summary as a comment on each PR. +# See https://docs.codecov.io/docs/pull-request-comments +# This same information is available from the Codecov checks in the PR's +# "Checks" tab in GitHub even when this feature is disabled. +# +comment: + # + # Legend: + # "diff" is the Coverage Diff of the pull request. + # "files" are the files impacted by the pull request + # "flags" are the coverage status of the pull request + # + # For an even shorter layout, this may be used: + # layout: "condensed_header, diff, files, flags" + # + layout: "header, diff, files, flags" + + # + # Only add the Codecov comment to the PR when coverage changes + # + require_changes: true + + # + # The overall project coverage is secondary to the individual coverage + # and it is always shown in the repository at: + # - https://app.codecov.io/gh/xenserver/python-libs + # + hide_project_coverage: true + +coverage: + # + # Number of precision digits when showing coverage percentage e.g. 88.8%. + # One precision digit is also used by coverage.py when reporting coverage: + # + precision: 1 + + status: + + # + # Patch coverage is the incremental change in coverage in a PR + # + patch: + default: false # disable the default status that measures entire project + + tests: + paths: ["tests/"] # only include coverage in "tests/" folder + target: auto # don't reduce coverage on test code lines + + python-libs: # declare a new status context "python-libs" + paths: ["xcp/"] # library code + target: 0 # Temporarily allow 0% coverage to allow to merge dmv.py, + # Project threshold sets a lower bound to not go further. + + # + # Project coverage is the absolute coverage of the entire project + # + project: + default: false # disable the default status that measures entire project + + tests: # declare a new status context "tests" + paths: ["tests/"] # only include coverage in "tests/" folder + target: 99% # we always want 99% coverage here + + python-libs: # declare a new status context "python-libs" + paths: ["xcp/"] # library code + target: 78% # Coverage should not be reduced compared to its base From 2e3f6efa72eb69ba924db126040501f663084584 Mon Sep 17 00:00:00 2001 From: Chunjie Zhu Date: Wed, 6 Aug 2025 18:30:06 +0800 Subject: [PATCH 5/6] CP-54481: support DMV RPU plugin move DMV common code to python-libs because RPU plugin calls these functions as well Signed-off-by: Chunjie Zhu --- xcp/dmv.py | 362 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 362 insertions(+) create mode 100644 xcp/dmv.py diff --git a/xcp/dmv.py b/xcp/dmv.py new file mode 100644 index 00000000..08d31dd6 --- /dev/null +++ b/xcp/dmv.py @@ -0,0 +1,362 @@ +# Copyright (c) 2025, Citrix Inc. +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import os +import subprocess +import json +import re +import struct +import glob +import errno + +from .compat import open_with_codec_handling + +dmv_proto_ver = 0.1 +err_proto_ver = 0.1 + +def get_all_kabi_dirs(): + """Return a list of (kabi_ver, updates_dir, dmv_dir) tuples for all kernel versions.""" + modules_root = "/lib/modules/" + dirs = [] + for kabi_ver in os.listdir(modules_root): + updates_dir = os.path.join(modules_root, kabi_ver, "updates") + dmv_dir = os.path.join(modules_root, kabi_ver, "dmv") + # not checking if updates_dir and dmv_dir exist here, will check later when use them + dirs.append((kabi_ver, updates_dir, dmv_dir)) + return dirs + +def note_offset(var_len): + """Note section has 4 bytes padding""" + ret = (((var_len - 1) & ~3) + 4) - var_len + return ret + +def get_active_variant(modules): + """Check and report active driver""" + # Check if any module in the modules is loaded + for module in modules: + # get 'module' from 'module.ko' + module_name = os.path.splitext(module)[0] + note_file = os.path.join("/sys/module", module_name, "notes/.note.XenServer") + if not os.path.isfile(note_file): + continue + + note_struct_size = struct.calcsize('III') + with open(note_file, "rb") as n_file: + for _ in range(3): + note_hdr = struct.unpack('III', n_file.read(note_struct_size)) + n_file.read(note_offset(note_struct_size)) + vendor = n_file.read(note_hdr[0]) + n_file.read(note_offset(note_hdr[0])) + content = n_file.read(note_hdr[1])[:-1] + n_file.read(note_offset(note_hdr[1])) + note_type = note_hdr[2] + if vendor == b'XenServer' and note_type == 1: + variant = content.decode("ascii") + return variant + return None + +def get_loaded_modules(modules): + """Return all loaded modules""" + loaded_modules = [] + for module in modules: + # get 'module' from 'module.ko' + module_name = os.path.splitext(module)[0] + note_file = os.path.join("/sys/module", module_name, "notes/.note.XenServer") + if os.path.isfile(note_file): + loaded_modules.append(module) + return loaded_modules + +def id_matches(id1, id2): + if '*' in [id1, id2]: + return True + return id1 == id2 + +''' +driver_pci_ids example: +{ + "abc.ko": [ + { + "vendor_id": "14e4", + "device_id": "163c", + "subvendor_id": "*", + "subdevice_id": "*" + }, 
+ { + "vendor_id": "14e4", + "device_id": "163b", + "subvendor_id": "*", + "subdevice_id": "*" + }], + "de.ko": [ + { + "vendor_id": "eees", + "device_id": "163c", + "subvendor_id": "*", + "subdevice_id": "*" + }, + { + "vendor_id": "14f4", + "device_id": "16db", + "subvendor_id": "2123", + "subdevice_id": "1123" + }] +} +''' +def pci_matches(present_pci_id, driver_pci_ids): + """Check if present PCI ID matches any of the driver PCI IDs.""" + merged_driver_pci_id_list = [] + for module_pci_list in driver_pci_ids.values(): + for item in module_pci_list: + merged_driver_pci_id_list.append(item) + + for pci_id in merged_driver_pci_id_list: + if (id_matches(present_pci_id['vendor'], pci_id['vendor_id']) and + id_matches(present_pci_id['device'], pci_id['device_id']) and + id_matches(present_pci_id['subvendor'], pci_id['subvendor_id']) and + id_matches(present_pci_id['subdevice'], pci_id['subdevice_id'])): + return True + return False + +def hardware_present(lspci_out, pci_ids): + """Check if supported hardware is fitted""" + if not pci_ids or not lspci_out: + return False + + # 'lspci -nm' output: + # 00:15.3 "0604" "15ad" "07a0" -r01 -p00 "15ad" "07a0" + # 00:01.0 "0604" "8086" "7191" -r01 -p00 "" "" + lspci_expression = r''' + ^ + (?P\S+) # PCI slot (00:15.3) + \s+ + "(?P[^"]*)" # Device class (0604) + \s+ + "(?P[^"]*)" # Vendor (15ad) + \s+ + "(?P[^"]*)" # Device name (07a0) + \s* + (?:-(?P\S+))? # Optional revision (-r01) + \s* + (?:-(?P\S+))? # Optional programming interface (-p00) + \s+ + "(?P[^"]*)" # Subvendor (15ad or empty) + \s+ + "(?P[^"]*)" # Subdevice (07a0 or empty) + $ + ''' + lscpi_pattern = re.compile(lspci_expression, re.VERBOSE | re.MULTILINE) + for match in lscpi_pattern.finditer(lspci_out): + if pci_matches(match.groupdict(), pci_ids): + return True + return False + +def variant_selected(modules, updates_dir): + """Check and return which driver is selected""" + # Check if any module in the modules is selected + for module in modules: + slink_file = os.path.join(updates_dir, module) + if os.path.islink(slink_file): + module_path = os.path.realpath(slink_file) + module_dir = os.path.dirname(module_path) + info_file = os.path.join(module_dir, "info.json") + with open(info_file, "r", encoding="ascii") as json_file: + json_data = json.load(json_file) + variant = json_data["variant"] + + return variant + return None + +class DriverMultiVersion(object): + def __init__(self, updates_dir, lspci_out, runtime=False): + self.updates_dir = updates_dir + self.lspci_out = lspci_out + self.runtime = runtime + + def variant_selected(self, modules): + """Check and return which driver is selected""" + # Check if any module in the modules is selected + for module in modules: + slink_file = os.path.join(self.updates_dir, module) + if os.path.islink(slink_file): + module_path = os.path.realpath(slink_file) + module_dir = os.path.dirname(module_path) + info_file = os.path.join(module_dir, "info.json") + with open(info_file, "r", encoding="ascii") as json_file: + json_data = json.load(json_file) + variant = json_data["variant"] + + return variant + return None + + def parse_dmv_info(self, fpath): + """Populate dmv list with information""" + json_data = None + with open_with_codec_handling(fpath, encoding="ascii") as json_file: + json_data = json.load(json_file) + json_formatted = { + "type": json_data["category"], + "friendly_name": json_data["name"], + "description": json_data["description"], + "info": json_data["name"], + "variants": { + json_data["variant"]: { + "version": 
json_data["version"], + "hardware_present": hardware_present( + self.lspci_out.stdout, + json_data["pci_ids"]), + "priority": json_data["priority"], + "status": json_data["status"]}}} + if self.runtime: + json_formatted["selected"] = self.variant_selected( + json_data["pci_ids"].keys()) + json_formatted["active"] = get_active_variant( + json_data["pci_ids"].keys()) + json_formatted["loaded modules"] = get_loaded_modules( + json_data["pci_ids"].keys()) + return json_data, json_formatted + +class DriverMultiVersionManager(object): + def __init__(self, runtime=False): + self.runtime = runtime + self.dmv_list = { + "protocol": {"version": dmv_proto_ver}, + "operation": {"reboot": False}, + "drivers": {} + } + self.errors_list = { + "version": err_proto_ver, + "exit_code": 0, + "message": "Success" + } + + def merge_jsondata(self, oldone, newone): + variants = oldone["variants"] + for k, v in newone["variants"].items(): + variants[k] = v + + json_formatted = { + "type": oldone["type"], + "friendly_name": oldone["friendly_name"], + "description": oldone["description"], + "info": oldone["info"], + "variants": variants} + + if self.runtime: + selected = None + if oldone["selected"] is not None: + selected = oldone["selected"] + elif newone["selected"] is not None: + selected = newone["selected"] + json_formatted["selected"] = selected + + active = None + if oldone["active"] is not None: + active = oldone["active"] + elif newone["active"] is not None: + active = newone["active"] + json_formatted["active"] = active + + loaded = oldone["loaded modules"] + newone["loaded modules"] + json_formatted["loaded modules"] = loaded + + self.dmv_list["drivers"][oldone["info"]] = json_formatted + + def process_dmv_data(self, json_data, json_formatted): + if not json_data["name"] in self.dmv_list["drivers"]: + self.dmv_list["drivers"][json_data["name"]] = json_formatted + elif self.dmv_list["drivers"][json_data["name"]] is None: + self.dmv_list["drivers"][json_data["name"]] = json_formatted + else: + self.merge_jsondata(self.dmv_list["drivers"][json_data["name"]], json_formatted) + + def parse_dmv_list(self): + lspci_out = subprocess.run(["lspci", '-nm'], stdout=subprocess.PIPE, + stderr=subprocess.PIPE, universal_newlines=True, + check=True) + for _, updates_dir, dmv_dir in get_all_kabi_dirs(): + if not os.path.isdir(dmv_dir): + continue + + for path, _, files in os.walk(dmv_dir): + if "info.json" not in files: + continue + + fpath = os.path.join(path, "info.json") + d = DriverMultiVersion(updates_dir, lspci_out, self.runtime) + json_data, json_formatted = d.parse_dmv_info(fpath) + self.process_dmv_data(json_data, json_formatted) + + def parse_dmv_file(self, fpath): + lspci_out = subprocess.run(["lspci", '-nm'], stdout=subprocess.PIPE, + stderr=subprocess.PIPE, universal_newlines=True, + check=True) + d = DriverMultiVersion("", lspci_out) + json_data, json_formatted = d.parse_dmv_info(fpath) + self.process_dmv_data(json_data, json_formatted) + + def get_dmv_list(self): + return self.dmv_list + + def create_dmv_symlink(self, name, ver): + created = False + for _, updates_dir, dmv_dir in get_all_kabi_dirs(): + module_dir = os.path.join(dmv_dir, name, ver) + module_files = glob.glob(os.path.join(module_dir, "**", "*.ko"), recursive=True) + for module_file in module_files: + # updates_dir may not exist + os.makedirs(updates_dir, exist_ok=True) + module_sym = os.path.join(updates_dir, os.path.basename(module_file)) + tmp_name = module_sym + ".tmp" + try: + os.unlink(tmp_name) + except FileNotFoundError: + pass + 
os.symlink(module_file, tmp_name) + os.rename(tmp_name, module_sym) + created = True + modules = [module_sym] + input_data = "\n".join(modules) + "\n" + subprocess.run( + ["/usr/sbin/weak-modules", "--no-initramfs", "--add-modules"], + input=input_data, + text=True, + check=True + ) + if created: + subprocess.run(["/usr/sbin/depmod", "-a"], check=True) + uname_r = subprocess.run(["uname", '-r'], stdout=subprocess.PIPE, text=True, + check=True).stdout.strip() + if os.path.exists("/usr/bin/dracut"): + initrd_img = "/boot/initrd-" + uname_r + ".img" + subprocess.run(["/usr/bin/dracut", "-f", initrd_img, uname_r], check=True) + return True + self.errors_list["exit_code"] = errno.ENOENT + self.errors_list["message"] = os.strerror(errno.ENOENT) + return False + + def get_dmv_error(self): + return self.errors_list + + def set_dmv_error(self, errcode): + self.errors_list["exit_code"] = errcode + self.errors_list["message"] = os.strerror(errcode) From 3be08ccf40711582ba9ac56b7b464d5e9f7c23eb Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 18 Aug 2025 12:00:00 +0200 Subject: [PATCH 6/6] xcp/dmv.py: Fixup CI errors Signed-off-by: Bernhard Kaindl --- xcp/dmv.py | 74 ++++++++++++++++++++++++++++-------------------------- 1 file changed, 38 insertions(+), 36 deletions(-) diff --git a/xcp/dmv.py b/xcp/dmv.py index 08d31dd6..4c80cbda 100644 --- a/xcp/dmv.py +++ b/xcp/dmv.py @@ -21,13 +21,14 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -import os -import subprocess +import errno +import glob import json +import os import re import struct -import glob -import errno +import subprocess +from typing import Any, Dict from .compat import open_with_codec_handling @@ -91,37 +92,36 @@ def id_matches(id1, id2): return True return id1 == id2 -''' -driver_pci_ids example: -{ - "abc.ko": [ - { - "vendor_id": "14e4", - "device_id": "163c", - "subvendor_id": "*", - "subdevice_id": "*" - }, - { - "vendor_id": "14e4", - "device_id": "163b", - "subvendor_id": "*", - "subdevice_id": "*" - }], - "de.ko": [ - { - "vendor_id": "eees", - "device_id": "163c", - "subvendor_id": "*", - "subdevice_id": "*" - }, - { - "vendor_id": "14f4", - "device_id": "16db", - "subvendor_id": "2123", - "subdevice_id": "1123" - }] -} -''' + +# driver_pci_ids example: +# { +# "abc.ko": [ +# { +# "vendor_id": "14e4", +# "device_id": "163c", +# "subvendor_id": "*", +# "subdevice_id": "*" +# }, +# { +# "vendor_id": "14e4", +# "device_id": "163b", +# "subvendor_id": "*", +# "subdevice_id": "*" +# }], +# "de.ko": [ +# { +# "vendor_id": "eees", +# "device_id": "163c", +# "subvendor_id": "*", +# "subdevice_id": "*" +# }, +# { +# "vendor_id": "14f4", +# "device_id": "16db", +# "subvendor_id": "2123", +# "subdevice_id": "1123" +# }] +# } def pci_matches(present_pci_id, driver_pci_ids): """Check if present PCI ID matches any of the driver PCI IDs.""" merged_driver_pci_id_list = [] @@ -236,6 +236,8 @@ def parse_dmv_info(self, fpath): return json_data, json_formatted class DriverMultiVersionManager(object): + dmv_list = {} # type: Dict[str, Any] + def __init__(self, runtime=False): self.runtime = runtime self.dmv_list = { @@ -296,7 +298,7 @@ def parse_dmv_list(self): for _, updates_dir, dmv_dir in get_all_kabi_dirs(): if not os.path.isdir(dmv_dir): continue - + for path, _, files in os.walk(dmv_dir): if "info.json" not in files: continue
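
Note on the ".note.XenServer" parsing in xcp/dmv.py (patch 5): each note
record that get_active_variant() reads from
/sys/module/<module>/notes/.note.XenServer consists of three 32-bit words
(namesz, descsz, type) followed by the name and the descriptor, each padded
to a 4-byte boundary, which the note_offset() helper computes. The standalone
sketch below is illustrative only, not part of the patches: it packs one
record in memory exactly the way that reader consumes it (the name is
compared without a trailing NUL, while the descriptor carries one, which the
reader strips with [:-1]) and parses it back:

import io
import struct

def note_offset(var_len):
    """Padding that rounds var_len up to the next 4-byte boundary."""
    return (((var_len - 1) & ~3) + 4) - var_len

def build_note(name, desc, note_type):
    """Pack one ELF note record the way get_active_variant() reads it."""
    blob = struct.pack("III", len(name), len(desc) + 1, note_type)
    blob += name + b"\0" * note_offset(len(name))    # name, 4-byte aligned
    blob += desc + b"\0"                             # descriptor plus NUL
    blob += b"\0" * note_offset(len(desc) + 1)       # align the descriptor
    return blob

note = io.BytesIO(build_note(b"XenServer", b"generic", 1))
hdr_size = struct.calcsize("III")                    # 12 bytes: no padding needed
namesz, descsz, note_type = struct.unpack("III", note.read(hdr_size))
note.read(note_offset(hdr_size))
vendor = note.read(namesz)                           # b"XenServer"
note.read(note_offset(namesz))
variant = note.read(descsz)[:-1]                     # strip the trailing NUL
assert vendor == b"XenServer" and note_type == 1
print(variant.decode("ascii"))                       # -> generic

Likewise, hardware_present() matches each device from "lspci -nm" output
against a driver's PCI ID table, with "*" acting as a wildcard on either
side of the comparison. A minimal usage sketch, assuming the patched xcp
package is importable (the sample lspci line and IDs below are made up):

from xcp.dmv import hardware_present

lspci_out = '00:19.0 "0200" "14e4" "163b" -r01 -p00 "1028" "02f1"\n'
pci_ids = {"abc.ko": [{"vendor_id": "14e4", "device_id": "163b",
                       "subvendor_id": "*", "subdevice_id": "*"}]}
print(hardware_present(lspci_out, pci_ids))          # -> True (wildcards match)

Also worth noting: create_dmv_symlink() replaces the module symlink
atomically by creating it under a ".tmp" name first and then calling
os.rename() over the final name, so /lib/modules/<kabi_ver>/updates never
exposes a half-updated symlink to readers.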