Migrate to Ruff for lint and format (#4852)

* Migrate to Ruff for lint and format
* Fix pylint issues
* DBus property sets into normal awaitable methods
* Fix tests relying on separate tasks in connect
* Fixes from feedback

Parent: 1861d756e9
Commit: 7fd6dce55f
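The one behavioral change in this commit is the third bullet: DBus property setters that fired untracked asyncio tasks become plain methods returning an awaitable the caller must await. A minimal self-contained sketch of the before/after pattern, with a hypothetical FakeBus standing in for the real dbus-fast proxy:

import asyncio
from collections.abc import Awaitable


class FakeBus:
    """Stand-in for the dbus-fast proxy used by the real board classes."""

    async def set_activity_led(self, enabled: bool) -> None:
        await asyncio.sleep(0)  # pretend to talk to D-Bus


class Green:
    """Sketch of the new style. The old property setter hid
    asyncio.create_task(self.dbus.set_activity_led(enabled)) behind an
    attribute assignment, leaving the task unawaited."""

    def __init__(self) -> None:
        self.dbus = FakeBus()
        self._data: dict[str, bool] = {}

    def set_activity_led(self, enabled: bool) -> Awaitable[None]:
        """Enable/disable activity LED; the caller awaits the DBus call."""
        self._data["activity_led"] = enabled
        return self.dbus.set_activity_led(enabled)


async def main() -> None:
    board = Green()
    await board.set_activity_led(True)  # explicit await, no orphaned task


asyncio.run(main())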
@@ -10,11 +10,13 @@
   "customizations": {
     "vscode": {
       "extensions": [
-        "ms-python.python",
+        "charliermarsh.ruff",
         "ms-python.pylint",
         "ms-python.vscode-pylance",
         "visualstudioexptteam.vscodeintellicode",
-        "esbenp.prettier-vscode"
+        "redhat.vscode-yaml",
+        "esbenp.prettier-vscode",
+        "GitHub.vscode-pull-request-github"
       ],
       "settings": {
         "terminal.integrated.profiles.linux": {
@@ -28,9 +30,9 @@
         "editor.formatOnType": true,
         "files.trimTrailingWhitespace": true,
         "python.pythonPath": "/usr/local/bin/python3",
-        "python.formatting.provider": "black",
-        "python.formatting.blackArgs": ["--target-version", "py312"],
-        "python.formatting.blackPath": "/usr/local/bin/black"
+        "[python]": {
+          "editor.defaultFormatter": "charliermarsh.ruff"
+        }
       }
     }
   },
.github/PULL_REQUEST_TEMPLATE.md (2 changes)

@@ -52,7 +52,7 @@
 - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
-- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
+- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
 - [ ] Tests have been added to verify that the new code works.

 If API endpoints of add-on configuration are added/changed:
.github/workflows/ci.yaml (179 changes)

@@ -61,8 +61,8 @@ jobs:
          . venv/bin/activate
          pre-commit install-hooks

-  lint-black:
-    name: Check black
+  lint-ruff-format:
+    name: Check ruff-format
     runs-on: ubuntu-latest
     needs: prepare
     steps:
@@ -85,10 +85,67 @@ jobs:
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
-      - name: Run black
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.0.0
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff-format
        run: |
          . venv/bin/activate
-          black --target-version py312 --check supervisor tests setup.py
+          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github
+
+  lint-ruff:
+    name: Check ruff
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.1.1
+      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
+        uses: actions/setup-python@v5.0.0
+        id: python
+        with:
+          python-version: ${{ needs.prepare.outputs.python-version }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v4.0.0
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.0.0
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff
+        run: |
+          . venv/bin/activate
+          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github

   lint-dockerfile:
     name: Check Dockerfile
@@ -149,79 +206,6 @@ jobs:
          . venv/bin/activate
          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files

-  lint-flake8:
-    name: Check flake8
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v4.0.0
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Register flake8 problem matcher
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/flake8.json"
-      - name: Run flake8
-        run: |
-          . venv/bin/activate
-          flake8 supervisor tests
-
-  lint-isort:
-    name: Check isort
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v4.0.0
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v4.0.0
-        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run isort
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
-
   lint-json:
     name: Check JSON
     runs-on: ubuntu-latest
@@ -298,47 +282,6 @@ jobs:
          . venv/bin/activate
          pylint supervisor tests

-  lint-pyupgrade:
-    name: Check pyupgrade
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v4.0.0
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v4.0.0
-        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run pyupgrade
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
-
   pytest:
     runs-on: ubuntu-latest
     needs: prepare
.github/workflows/matchers/flake8.json (deleted, 30 lines)

@@ -1,30 +0,0 @@
-{
-  "problemMatcher": [
-    {
-      "owner": "flake8-error",
-      "severity": "error",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    },
-    {
-      "owner": "flake8-warning",
-      "severity": "warning",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    }
-  ]
-}
@@ -1,34 +1,15 @@
 repos:
-  - repo: https://github.com/psf/black
-    rev: 23.12.1
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.1.14
     hooks:
-      - id: black
+      - id: ruff
         args:
-          - --safe
-          - --quiet
-          - --target-version
-          - py312
+          - --fix
+      - id: ruff-format
         files: ^((supervisor|tests)/.+)?[^/]+\.py$
-  - repo: https://github.com/PyCQA/flake8
-    rev: 7.0.0
-    hooks:
-      - id: flake8
-        additional_dependencies:
-          - flake8-docstrings==1.7.0
-          - pydocstyle==6.3.0
-        files: ^(supervisor|script|tests)/.+\.py$
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.5.0
     hooks:
       - id: check-executables-have-shebangs
         stages: [manual]
       - id: check-json
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.13.2
-    hooks:
-      - id: isort
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.15.0
-    hooks:
-      - id: pyupgrade
-        args: [--py312-plus]
.vscode/tasks.json (18 changes)

@@ -58,9 +58,23 @@
       "problemMatcher": []
     },
     {
-      "label": "Flake8",
+      "label": "Ruff Check",
       "type": "shell",
-      "command": "flake8 supervisor tests",
+      "command": "ruff check --fix supervisor tests",
       "group": {
         "kind": "test",
         "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Ruff Format",
+      "type": "shell",
+      "command": "ruff format supervisor tests",
+      "group": {
+        "kind": "test",
+        "isDefault": true
pyproject.toml (287 changes)

@@ -44,7 +44,7 @@ good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]

 [tool.pylint."MESSAGES CONTROL"]
 # Reasons disabled:
-# format - handled by black
+# format - handled by ruff
 # abstract-method - with intro of async there are always methods missing
 # cyclic-import - doesn't test if both import on load
 # duplicate-code - unavoidable
@@ -71,6 +71,136 @@ disable = [
     "too-many-statements",
     "unused-argument",
     "consider-using-with",
+
+    # Handled by ruff
+    # Ref: <https://github.com/astral-sh/ruff/issues/970>
+    "await-outside-async", # PLE1142
+    "bad-str-strip-call", # PLE1310
+    "bad-string-format-type", # PLE1307
+    "bidirectional-unicode", # PLE2502
+    "continue-in-finally", # PLE0116
+    "duplicate-bases", # PLE0241
+    "format-needs-mapping", # F502
+    "function-redefined", # F811
+    # Needed because ruff does not understand type of __all__ generated by a function
+    # "invalid-all-format", # PLE0605
+    "invalid-all-object", # PLE0604
+    "invalid-character-backspace", # PLE2510
+    "invalid-character-esc", # PLE2513
+    "invalid-character-nul", # PLE2514
+    "invalid-character-sub", # PLE2512
+    "invalid-character-zero-width-space", # PLE2515
+    "logging-too-few-args", # PLE1206
+    "logging-too-many-args", # PLE1205
+    "missing-format-string-key", # F524
+    "mixed-format-string", # F506
+    "no-method-argument", # N805
+    "no-self-argument", # N805
+    "nonexistent-operator", # B002
+    "nonlocal-without-binding", # PLE0117
+    "not-in-loop", # F701, F702
+    "notimplemented-raised", # F901
+    "return-in-init", # PLE0101
+    "return-outside-function", # F706
+    "syntax-error", # E999
+    "too-few-format-args", # F524
+    "too-many-format-args", # F522
+    "too-many-star-expressions", # F622
+    "truncated-format-string", # F501
+    "undefined-all-variable", # F822
+    "undefined-variable", # F821
+    "used-prior-global-declaration", # PLE0118
+    "yield-inside-async-function", # PLE1700
+    "yield-outside-function", # F704
+    "anomalous-backslash-in-string", # W605
+    "assert-on-string-literal", # PLW0129
+    "assert-on-tuple", # F631
+    "bad-format-string", # W1302, F
+    "bad-format-string-key", # W1300, F
+    "bare-except", # E722
+    "binary-op-exception", # PLW0711
+    "cell-var-from-loop", # B023
+    # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
+    "duplicate-except", # B014
+    "duplicate-key", # F601
+    "duplicate-string-formatting-argument", # F
+    "duplicate-value", # F
+    "eval-used", # PGH001
+    "exec-used", # S102
+    # "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
+    "f-string-without-interpolation", # F541
+    "forgotten-debug-statement", # T100
+    "format-string-without-interpolation", # F
+    # "global-statement", # PLW0603, ruff catches new occurrences, needs more work
+    "global-variable-not-assigned", # PLW0602
+    "implicit-str-concat", # ISC001
+    "import-self", # PLW0406
+    "inconsistent-quotes", # Q000
+    "invalid-envvar-default", # PLW1508
+    "keyword-arg-before-vararg", # B026
+    "logging-format-interpolation", # G
+    "logging-fstring-interpolation", # G
+    "logging-not-lazy", # G
+    "misplaced-future", # F404
+    "named-expr-without-context", # PLW0131
+    "nested-min-max", # PLW3301
+    # "pointless-statement", # B018, ruff catches new occurrences, needs more work
+    "raise-missing-from", # TRY200
+    # "redefined-builtin", # A001, ruff is way more stricter, needs work
+    "try-except-raise", # TRY302
+    "unused-argument", # ARG001, we don't use it
+    "unused-format-string-argument", # F507
+    "unused-format-string-key", # F504
+    "unused-import", # F401
+    "unused-variable", # F841
+    "useless-else-on-loop", # PLW0120
+    "wildcard-import", # F403
+    "bad-classmethod-argument", # N804
+    "consider-iterating-dictionary", # SIM118
+    "empty-docstring", # D419
+    "invalid-name", # N815
+    "line-too-long", # E501, disabled globally
+    "missing-class-docstring", # D101
+    "missing-final-newline", # W292
+    "missing-function-docstring", # D103
+    "missing-module-docstring", # D100
+    "multiple-imports", # E401
+    "singleton-comparison", # E711, E712
+    "subprocess-run-check", # PLW1510
+    "superfluous-parens", # UP034
+    "ungrouped-imports", # I001
+    "unidiomatic-typecheck", # E721
+    "unnecessary-direct-lambda-call", # PLC3002
+    "unnecessary-lambda-assignment", # PLC3001
+    "unneeded-not", # SIM208
+    "useless-import-alias", # PLC0414
+    "wrong-import-order", # I001
+    "wrong-import-position", # E402
+    "comparison-of-constants", # PLR0133
+    "comparison-with-itself", # PLR0124
+    # "consider-alternative-union-syntax", # UP007, typing extension
+    "consider-merging-isinstance", # PLR1701
+    # "consider-using-alias", # UP006, typing extension
+    "consider-using-dict-comprehension", # C402
+    "consider-using-generator", # C417
+    "consider-using-get", # SIM401
+    "consider-using-set-comprehension", # C401
+    "consider-using-sys-exit", # PLR1722
+    "consider-using-ternary", # SIM108
+    "literal-comparison", # F632
+    "property-with-parameters", # PLR0206
+    "super-with-arguments", # UP008
+    "too-many-branches", # PLR0912
+    "too-many-return-statements", # PLR0911
+    "too-many-statements", # PLR0915
+    "trailing-comma-tuple", # COM818
+    "unnecessary-comprehension", # C416
+    "use-a-generator", # C417
+    "use-dict-literal", # C406
+    "use-list-literal", # C405
+    "useless-object-inheritance", # UP004
+    "useless-return", # PLR1711
+    # "no-self-use", # PLR6301 # Optional plugin, not enabled
 ]

 [tool.pylint.REPORTS]
@@ -97,16 +227,145 @@ filterwarnings = [
     "ignore::pytest.PytestUnraisableExceptionWarning",
 ]

-[tool.isort]
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-line_length = 88
-indent = "    "
-force_sort_within_sections = true
-sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
-default_section = "THIRDPARTY"
-forced_separate = "tests"
-combine_as_imports = true
-use_parentheses = true
-known_first_party = ["supervisor", "tests"]
+[tool.ruff]
+select = [
+    "B002", # Python does not support the unary prefix increment
+    "B007", # Loop control variable {name} not used within loop body
+    "B014", # Exception handler with duplicate exception
+    "B023", # Function definition does not bind loop variable {name}
+    "B026", # Star-arg unpacking after a keyword argument is strongly discouraged
+    "C", # complexity
+    "COM818", # Trailing comma on bare tuple prohibited
+    "D", # docstrings
+    "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
+    "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
+    "E", # pycodestyle
+    "F", # pyflakes/autoflake
+    "G", # flake8-logging-format
+    "I", # isort
+    "ICN001", # import conventions; {name} should be imported as {asname}
+    "N804", # First argument of a class method should be named cls
+    "N805", # First argument of a method should be named self
+    "N815", # Variable {name} in class scope should not be mixedCase
+    "PGH001", # No builtin eval() allowed
+    "PGH004", # Use specific rule codes when using noqa
+    "PLC0414", # Useless import alias. Import alias does not rename original package.
+    "PLC", # pylint
+    "PLE", # pylint
+    "PLR", # pylint
+    "PLW", # pylint
+    "Q000", # Double quotes found but single quotes preferred
+    "RUF006", # Store a reference to the return value of asyncio.create_task
+    "S102", # Use of exec detected
+    "S103", # bad-file-permissions
+    "S108", # hardcoded-temp-file
+    "S306", # suspicious-mktemp-usage
+    "S307", # suspicious-eval-usage
+    "S313", # suspicious-xmlc-element-tree-usage
+    "S314", # suspicious-xml-element-tree-usage
+    "S315", # suspicious-xml-expat-reader-usage
+    "S316", # suspicious-xml-expat-builder-usage
+    "S317", # suspicious-xml-sax-usage
+    "S318", # suspicious-xml-mini-dom-usage
+    "S319", # suspicious-xml-pull-dom-usage
+    "S320", # suspicious-xmle-tree-usage
+    "S601", # paramiko-call
+    "S602", # subprocess-popen-with-shell-equals-true
+    "S604", # call-with-shell-equals-true
+    "S608", # hardcoded-sql-expression
+    "S609", # unix-command-wildcard-injection
+    "SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
+    "SIM117", # Merge with-statements that use the same scope
+    "SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
+    "SIM201", # Use {left} != {right} instead of not {left} == {right}
+    "SIM208", # Use {expr} instead of not (not {expr})
+    "SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
+    "SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
+    "SIM401", # Use get from dict with default instead of an if block
+    "T100", # Trace found: {name} used
+    "T20", # flake8-print
+    "TID251", # Banned imports
+    "TRY004", # Prefer TypeError exception for invalid type
+    "TRY200", # Use raise from to specify exception cause
+    "TRY302", # Remove exception handler; error is immediately re-raised
+    "UP", # pyupgrade
+    "W", # pycodestyle
+]
+
+ignore = [
+    "D202", # No blank lines allowed after function docstring
+    "D203", # 1 blank line required before class docstring
+    "D213", # Multi-line docstring summary should start at the second line
+    "D406", # Section name should end with a newline
+    "D407", # Section name underlining
+    "E501", # line too long
+    "E731", # do not assign a lambda expression, use a def
+
+    # Ignore ignored, as the rule is now back in preview/nursery, which cannot
+    # be ignored anymore without warnings.
+    # https://github.com/astral-sh/ruff/issues/7491
+    # "PLC1901", # Lots of false positives
+
+    # False positives https://github.com/astral-sh/ruff/issues/5386
+    "PLC0208", # Use a sequence type instead of a `set` when iterating over values
+    "PLR0911", # Too many return statements ({returns} > {max_returns})
+    "PLR0912", # Too many branches ({branches} > {max_branches})
+    "PLR0913", # Too many arguments to function call ({c_args} > {max_args})
+    "PLR0915", # Too many statements ({statements} > {max_statements})
+    "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
+    "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
+    "UP006", # keep type annotation style as is
+    "UP007", # keep type annotation style as is
+    # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
+    "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
+
+    # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
+    "W191",
+    "E111",
+    "E114",
+    "E117",
+    "D206",
+    "D300",
+    "Q000",
+    "Q001",
+    "Q002",
+    "Q003",
+    "COM812",
+    "COM819",
+    "ISC001",
+    "ISC002",
+
+    # Disabled because ruff does not understand type of __all__ generated by a function
+    "PLE0605",
+]
+
+[tool.ruff.flake8-import-conventions.extend-aliases]
+voluptuous = "vol"
+
+[tool.ruff.flake8-pytest-style]
+fixture-parentheses = false
+
+[tool.ruff.flake8-tidy-imports.banned-api]
+"pytz".msg = "use zoneinfo instead"
+
+[tool.ruff.isort]
+force-sort-within-sections = true
+section-order = [
+    "future",
+    "standard-library",
+    "third-party",
+    "first-party",
+    "local-folder",
+]
+forced-separate = ["tests"]
+known-first-party = ["supervisor", "tests"]
+combine-as-imports = true
+split-on-trailing-comma = false
+
+[tool.ruff.per-file-ignores]
+
+# DBus Service Mocks must use typing and names understood by dbus-fast
+"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
+
+[tool.ruff.mccabe]
+max-complexity = 25
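One concrete effect of the [tool.ruff.flake8-tidy-imports.banned-api] table above: any import of pytz now fails `ruff check` with TID251 and the configured message. A hypothetical snippet showing the zoneinfo replacement the message asks for:

# import pytz                   # flagged: TID251 "use zoneinfo instead"
from datetime import datetime
from zoneinfo import ZoneInfo   # stdlib replacement since Python 3.9

print(datetime.now(tz=ZoneInfo("Europe/Amsterdam")).isoformat())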
@@ -1,7 +1,6 @@
 aiodns==3.1.1
 aiohttp==3.9.3
 aiohttp-fast-url-dispatcher==0.3.0
-async_timeout==4.0.3
 atomicwrites-homeassistant==1.4.1
 attrs==23.2.0
 awesomeversion==23.11.0
@@ -1,16 +1,12 @@
-black==23.12.1
 coverage==7.4.1
-flake8-docstrings==1.7.0
-flake8==7.0.0
 pre-commit==3.6.0
-pydocstyle==6.3.0
 pylint==3.0.3
 pytest-aiohttp==1.0.5
 pytest-asyncio==0.23.3
 pytest-cov==4.1.0
 pytest-timeout==2.2.0
 pytest==7.4.4
-pyupgrade==3.15.0
+ruff==0.1.14
 time-machine==2.13.0
 typing_extensions==4.9.0
 urllib3==2.2.0
setup.cfg (deleted, 17 lines)

@@ -1,17 +0,0 @@
-[flake8]
-exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
-doctests = True
-max-line-length = 88
-# E501: line too long
-# W503: Line break occurred before a binary operator
-# E203: Whitespace before ':'
-# D202 No blank lines allowed after function docstring
-# W504 line break after binary operator
-ignore =
-    E501,
-    W503,
-    E203,
-    D202,
-    W504
-per-file-ignores =
-    tests/dbus_service_mocks/*.py: F821,F722
@@ -130,13 +130,17 @@ class APIOS(CoreSysAttributes):
         body = await api_validate(SCHEMA_GREEN_OPTIONS, request)

         if ATTR_ACTIVITY_LED in body:
-            self.sys_dbus.agent.board.green.activity_led = body[ATTR_ACTIVITY_LED]
+            await self.sys_dbus.agent.board.green.set_activity_led(
+                body[ATTR_ACTIVITY_LED]
+            )

         if ATTR_POWER_LED in body:
-            self.sys_dbus.agent.board.green.power_led = body[ATTR_POWER_LED]
+            await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])

         if ATTR_SYSTEM_HEALTH_LED in body:
-            self.sys_dbus.agent.board.green.user_led = body[ATTR_SYSTEM_HEALTH_LED]
+            await self.sys_dbus.agent.board.green.set_user_led(
+                body[ATTR_SYSTEM_HEALTH_LED]
+            )

         self.sys_dbus.agent.board.green.save_data()

@@ -155,13 +159,15 @@ class APIOS(CoreSysAttributes):
         body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)

         if ATTR_DISK_LED in body:
-            self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED]
+            await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])

         if ATTR_HEARTBEAT_LED in body:
-            self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED]
+            await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
+                body[ATTR_HEARTBEAT_LED]
+            )

         if ATTR_POWER_LED in body:
-            self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]
+            await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])

         self.sys_dbus.agent.board.yellow.save_data()
         self.sys_resolution.create_issue(
@@ -140,7 +140,7 @@ class APISupervisor(CoreSysAttributes):

         if ATTR_DIAGNOSTICS in body:
             self.sys_config.diagnostics = body[ATTR_DIAGNOSTICS]
-            self.sys_dbus.agent.diagnostics = body[ATTR_DIAGNOSTICS]
+            await self.sys_dbus.agent.set_diagnostics(body[ATTR_DIAGNOSTICS])

             if body[ATTR_DIAGNOSTICS]:
                 init_sentry(self.coresys)
@@ -53,7 +53,7 @@ def unique_addons(addons_list):


 def v1_homeassistant(
-    homeassistant_data: dict[str, Any] | None
+    homeassistant_data: dict[str, Any] | None,
 ) -> dict[str, Any] | None:
     """Cleanup homeassistant artefacts from v1."""
     if not homeassistant_data:
@@ -5,8 +5,6 @@ from contextlib import suppress
 from datetime import timedelta
 import logging

-import async_timeout
-
 from .const import (
     ATTR_STARTUP,
     RUN_SUPERVISOR_STATE,
@@ -179,7 +177,15 @@ class Core(CoreSysAttributes):
             and not self.sys_dev
             and self.supported
         ):
-            self.sys_dbus.agent.diagnostics = self.sys_config.diagnostics
+            try:
+                await self.sys_dbus.agent.set_diagnostics(self.sys_config.diagnostics)
+            except Exception as err:  # pylint: disable=broad-except
+                _LOGGER.warning(
+                    "Could not set diagnostics to %s due to %s",
+                    self.sys_config.diagnostics,
+                    err,
+                )
+                capture_exception(err)

         # Evaluate the system
         await self.sys_resolution.evaluate.evaluate_system()
@@ -298,7 +304,7 @@ class Core(CoreSysAttributes):

         # Stage 1
         try:
-            async with async_timeout.timeout(10):
+            async with asyncio.timeout(10):
                 await asyncio.wait(
                     [
                         self.sys_create_task(coro)
@@ -314,7 +320,7 @@ class Core(CoreSysAttributes):

         # Stage 2
         try:
-            async with async_timeout.timeout(10):
+            async with asyncio.timeout(10):
                 await asyncio.wait(
                     [
                         self.sys_create_task(coro)
@@ -1,5 +1,6 @@
 """OS-Agent implementation for DBUS."""
 import asyncio
+from collections.abc import Awaitable
 import logging
 from typing import Any

@@ -80,11 +81,9 @@ class OSAgent(DBusInterfaceProxy):
         """Return if diagnostics is enabled on OS-Agent."""
         return self.properties[DBUS_ATTR_DIAGNOSTICS]

-    @diagnostics.setter
-    @dbus_property
-    def diagnostics(self, value: bool) -> None:
+    def set_diagnostics(self, value: bool) -> Awaitable[None]:
         """Enable or disable OS-Agent diagnostics."""
-        asyncio.create_task(self.dbus.set_diagnostics(value))
+        return self.dbus.set_diagnostics(value)

     @property
     def all(self) -> list[DBusInterface]:
@@ -1,6 +1,7 @@
 """Green board management."""

 import asyncio
+from collections.abc import Awaitable

 from dbus_fast.aio.message_bus import MessageBus

@@ -25,11 +26,10 @@ class Green(BoardProxy):
         """Get activity LED enabled."""
         return self.properties[DBUS_ATTR_ACTIVITY_LED]

-    @activity_led.setter
-    def activity_led(self, enabled: bool) -> None:
+    def set_activity_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable activity LED."""
         self._data[ATTR_ACTIVITY_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Green.set_activity_led(enabled))
+        return self.dbus.Boards.Green.set_activity_led(enabled)

     @property
     @dbus_property
@@ -37,11 +37,10 @@ class Green(BoardProxy):
         """Get power LED enabled."""
         return self.properties[DBUS_ATTR_POWER_LED]

-    @power_led.setter
-    def power_led(self, enabled: bool) -> None:
+    def set_power_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable power LED."""
         self._data[ATTR_POWER_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Green.set_power_led(enabled))
+        return self.dbus.Boards.Green.set_power_led(enabled)

     @property
     @dbus_property
@@ -49,17 +48,18 @@ class Green(BoardProxy):
         """Get user LED enabled."""
         return self.properties[DBUS_ATTR_USER_LED]

-    @user_led.setter
-    def user_led(self, enabled: bool) -> None:
+    def set_user_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable disk LED."""
         self._data[ATTR_USER_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Green.set_user_led(enabled))
+        return self.dbus.Boards.Green.set_user_led(enabled)

     async def connect(self, bus: MessageBus) -> None:
         """Connect to D-Bus."""
         await super().connect(bus)

         # Set LEDs based on settings on connect
-        self.activity_led = self._data[ATTR_ACTIVITY_LED]
-        self.power_led = self._data[ATTR_POWER_LED]
-        self.user_led = self._data[ATTR_USER_LED]
+        await asyncio.gather(
+            self.set_activity_led(self._data[ATTR_ACTIVITY_LED]),
+            self.set_power_led(self._data[ATTR_POWER_LED]),
+            self.set_user_led(self._data[ATTR_USER_LED]),
+        )
@@ -1,6 +1,7 @@
 """Yellow board management."""

 import asyncio
+from collections.abc import Awaitable

 from dbus_fast.aio.message_bus import MessageBus

@@ -25,11 +26,10 @@ class Yellow(BoardProxy):
         """Get heartbeat LED enabled."""
         return self.properties[DBUS_ATTR_HEARTBEAT_LED]

-    @heartbeat_led.setter
-    def heartbeat_led(self, enabled: bool) -> None:
+    def set_heartbeat_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable heartbeat LED."""
         self._data[ATTR_HEARTBEAT_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Yellow.set_heartbeat_led(enabled))
+        return self.dbus.Boards.Yellow.set_heartbeat_led(enabled)

     @property
     @dbus_property
@@ -37,11 +37,10 @@ class Yellow(BoardProxy):
         """Get power LED enabled."""
         return self.properties[DBUS_ATTR_POWER_LED]

-    @power_led.setter
-    def power_led(self, enabled: bool) -> None:
+    def set_power_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable power LED."""
         self._data[ATTR_POWER_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Yellow.set_power_led(enabled))
+        return self.dbus.Boards.Yellow.set_power_led(enabled)

     @property
     @dbus_property
@@ -49,17 +48,18 @@ class Yellow(BoardProxy):
         """Get disk LED enabled."""
         return self.properties[DBUS_ATTR_DISK_LED]

-    @disk_led.setter
-    def disk_led(self, enabled: bool) -> None:
+    def set_disk_led(self, enabled: bool) -> Awaitable[None]:
         """Enable/disable disk LED."""
         self._data[ATTR_DISK_LED] = enabled
-        asyncio.create_task(self.dbus.Boards.Yellow.set_disk_led(enabled))
+        return self.dbus.Boards.Yellow.set_disk_led(enabled)

     async def connect(self, bus: MessageBus) -> None:
         """Connect to D-Bus."""
         await super().connect(bus)

         # Set LEDs based on settings on connect
-        self.disk_led = self._data[ATTR_DISK_LED]
-        self.heartbeat_led = self._data[ATTR_HEARTBEAT_LED]
-        self.power_led = self._data[ATTR_POWER_LED]
+        await asyncio.gather(
+            self.set_disk_led(self._data[ATTR_DISK_LED]),
+            self.set_heartbeat_led(self._data[ATTR_HEARTBEAT_LED]),
+            self.set_power_led(self._data[ATTR_POWER_LED]),
+        )
@@ -7,6 +7,8 @@ from uuid import uuid4

 from dbus_fast import Variant

+from ....host.const import InterfaceMethod, InterfaceType
+from .. import NetworkManager
 from . import (
     ATTR_ASSIGNED_MAC,
     CONF_ATTR_802_ETHERNET,
@@ -19,8 +21,6 @@ from . import (
     CONF_ATTR_PATH,
     CONF_ATTR_VLAN,
 )
-from .. import NetworkManager
-from ....host.const import InterfaceMethod, InterfaceType

 if TYPE_CHECKING:
     from ....host.configuration import Interface
@@ -42,9 +42,7 @@ def systemd_errors(func):
             return await func(*args, **kwds)
         except DBusFatalError as err:
             if err.type == DBUS_ERR_SYSTEMD_NO_SUCH_UNIT:
-                # pylint: disable=raise-missing-from
-                raise DBusSystemdNoSuchUnit(str(err))
-                # pylint: enable=raise-missing-from
+                raise DBusSystemdNoSuchUnit(str(err)) from None
             raise err

     return wrapper
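The `from None` form above replaces the pylint disable/enable pair: it suppresses the implicit exception chaining that raise-missing-from warned about. A small self-contained illustration (names here are made up):

def find_unit(unit: str) -> None:
    try:
        raise KeyError(unit)  # low-level error
    except KeyError:
        # chain suppressed; the KeyError is not shown as the cause
        raise LookupError(f"no such unit: {unit}") from None

try:
    find_unit("ssh.service")
except LookupError as err:
    assert err.__cause__ is None and err.__suppress_context__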
@@ -1,6 +1,6 @@
 """Interface to UDisks2 Drive over D-Bus."""

-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from dbus_fast.aio import MessageBus

@@ -95,7 +95,7 @@ class UDisks2Drive(DBusInterfaceProxy):
         """Return time drive first detected."""
         return datetime.fromtimestamp(
             self.properties[DBUS_ATTR_TIME_DETECTED] * 10**-6
-        ).astimezone(timezone.utc)
+        ).astimezone(UTC)

     @property
     @dbus_property
@@ -233,10 +233,10 @@ class DockerAddon(DockerInterface):
         tmpfs = {}

         if self.addon.with_tmpfs:
-            tmpfs["/tmp"] = ""
+            tmpfs["/tmp"] = ""  # noqa: S108

         if not self.addon.host_ipc:
-            tmpfs["/dev/shm"] = ""
+            tmpfs["/dev/shm"] = ""  # noqa: S108

         # Return None if no tmpfs is present
         if tmpfs:
@@ -175,7 +175,7 @@ class DockerHomeAssistant(DockerInterface):
                 ENV_TOKEN: self.sys_homeassistant.supervisor_token,
                 ENV_TOKEN_OLD: self.sys_homeassistant.supervisor_token,
             },
-            tmpfs={"/tmp": ""},
+            tmpfs={"/tmp": ""},  # noqa: S108
             oom_score_adj=-300,
         )
         _LOGGER.info(
@@ -1,4 +1,6 @@
 """Supervisor docker monitor based on events."""
+
+from contextlib import suppress
 from dataclasses import dataclass
 import logging
 from threading import Thread
@@ -47,10 +49,8 @@ class DockerMonitor(CoreSysAttributes, Thread):
     async def unload(self):
         """Stop docker events monitor."""
         self._events.close()
-        try:
+        with suppress(RuntimeError):
             self.join(timeout=5)
-        except RuntimeError:
-            pass

         _LOGGER.info("Stopped docker events monitor")
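The try/except/pass to contextlib.suppress rewrite above is ruff's SIM105 fix and recurs in several files in this commit; the two forms behave identically:

from contextlib import suppress

# Before: three lines of boilerplate to ignore one exception type.
try:
    raise RuntimeError("thread already stopped")
except RuntimeError:
    pass

# After: same behavior, one statement.
with suppress(RuntimeError):
    raise RuntimeError("thread already stopped")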
@@ -1,9 +1,9 @@
 """Home Assistant control object."""
 import asyncio
-from contextlib import asynccontextmanager, suppress
-from datetime import datetime, timedelta
+from contextlib import AbstractAsyncContextManager, asynccontextmanager, suppress
+from datetime import UTC, datetime, timedelta
 import logging
-from typing import Any, AsyncContextManager
+from typing import Any

 import aiohttp
 from aiohttp import hdrs
@@ -39,9 +39,8 @@ class HomeAssistantAPI(CoreSysAttributes):
     )
     async def ensure_access_token(self) -> None:
         """Ensure there is an access token."""
-        if (
-            self.access_token is not None
-            and self._access_token_expires > datetime.utcnow()
+        if self.access_token is not None and self._access_token_expires > datetime.now(
+            tz=UTC
         ):
             return

@@ -63,7 +62,7 @@ class HomeAssistantAPI(CoreSysAttributes):
                 _LOGGER.info("Updated Home Assistant API token")
                 tokens = await resp.json()
                 self.access_token = tokens["access_token"]
-                self._access_token_expires = datetime.utcnow() + timedelta(
+                self._access_token_expires = datetime.now(tz=UTC) + timedelta(
                     seconds=tokens["expires_in"]
                 )

@@ -78,7 +77,7 @@ class HomeAssistantAPI(CoreSysAttributes):
         timeout: int = 30,
         params: dict[str, str] | None = None,
         headers: dict[str, str] | None = None,
-    ) -> AsyncContextManager[aiohttp.ClientResponse]:
+    ) -> AbstractAsyncContextManager[aiohttp.ClientResponse]:
         """Async context manager to make a request with right auth."""
         url = f"{self.sys_homeassistant.api_url}/{path}"
         headers = headers or {}
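The datetime changes above are ruff's DTZ003 fix: naive datetime.utcnow() becomes the timezone-aware datetime.now(tz=UTC); datetime.UTC exists since Python 3.11, and this repo targets 3.12. A quick sketch of the difference:

from datetime import UTC, datetime

naive = datetime.utcnow()     # flagged by DTZ003, tzinfo is None
aware = datetime.now(tz=UTC)  # replacement used throughout this diff

assert naive.tzinfo is None
assert aware.tzinfo is UTC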
@@ -1,6 +1,7 @@
 """AppArmor control for host."""
 from __future__ import annotations

+from contextlib import suppress
 import errno
 import logging
 from pathlib import Path
@@ -62,10 +63,8 @@ class AppArmorControl(CoreSysAttributes):
         # Load profiles
         if self.available:
             for profile_name in self._profiles:
-                try:
+                with suppress(HostAppArmorError):
                     await self._load_profile(profile_name)
-                except HostAppArmorError:
-                    pass
         else:
             _LOGGER.warning("AppArmor is not enabled on host")
@@ -69,8 +69,7 @@ class LogsControl(CoreSysAttributes):
         )

     async def get_boot_id(self, offset: int = 0) -> str:
-        """
-        Get ID of a boot by offset.
+        """Get ID of a boot by offset.

         Current boot is offset = 0, negative numbers go that many in the past.
         Positive numbers count up from the oldest boot.
@@ -155,11 +155,10 @@ class SoundControl(CoreSysAttributes):
                     stream = pulse.source_output_info(index)
                 else:
                     stream = pulse.source_info(index)
+            elif application:
+                stream = pulse.sink_input_info(index)
             else:
-                if application:
-                    stream = pulse.sink_input_info(index)
-                else:
-                    stream = pulse.sink_info(index)
+                stream = pulse.sink_info(index)

             # Set volume
             pulse.volume_set_all_chans(stream, volume)
@@ -190,11 +189,10 @@ class SoundControl(CoreSysAttributes):
                     stream = pulse.source_output_info(index)
                 else:
                     stream = pulse.source_info(index)
+            elif application:
+                stream = pulse.sink_input_info(index)
             else:
-                if application:
-                    stream = pulse.sink_input_info(index)
-                else:
-                    stream = pulse.sink_info(index)
+                stream = pulse.sink_info(index)

             # Mute stream
             pulse.mute(stream, mute)
@@ -1,12 +1,12 @@
 """Job decorator."""
 import asyncio
 from collections.abc import Callable
+from contextlib import suppress
 from datetime import datetime, timedelta
 from functools import wraps
 import logging
 from typing import Any

-from . import SupervisorJob
 from ..const import CoreState
 from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import (
@@ -18,6 +18,7 @@ from ..exceptions import (
 from ..host.const import HostFeature
 from ..resolution.const import MINIMUM_FREE_SPACE_THRESHOLD, ContextType, IssueType
 from ..utils.sentry import capture_exception
+from . import SupervisorJob
 from .const import JobCondition, JobExecutionLimit
 from .job_group import JobGroup

@@ -146,10 +147,8 @@ class Job(CoreSysAttributes):
     def _post_init(self, obj: JobGroup | CoreSysAttributes) -> JobGroup | None:
         """Runtime init."""
         # Coresys
-        try:
+        with suppress(AttributeError):
             self.coresys = obj.coresys
-        except AttributeError:
-            pass
         if not self.coresys:
             raise RuntimeError(f"Job on {self.name} need to be an coresys object!")
@@ -2,9 +2,9 @@

 from asyncio import Lock

-from . import SupervisorJob
 from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import JobException, JobGroupExecutionLimitExceeded
+from . import SupervisorJob


 class JobGroup(CoreSysAttributes):
@@ -5,7 +5,6 @@ from datetime import date, datetime, time, timedelta
 import logging
 from uuid import UUID, uuid4

-import async_timeout
 import attr

 from ..const import CoreState
@@ -113,7 +112,7 @@ class Scheduler(CoreSysAttributes):

         # Wait until all are shutdown
         try:
-            async with async_timeout.timeout(timeout):
+            async with asyncio.timeout(timeout):
                 await asyncio.wait(running)
         except TimeoutError:
             _LOGGER.error("Timeout while waiting for jobs shutdown")
@@ -102,7 +102,7 @@ class PluginBase(ABC, FileConfiguration, CoreSysAttributes):

     async def watchdog_container(self, event: DockerContainerStateEvent) -> None:
         """Process state changes in plugin container and restart if necessary."""
-        if not (event.name == self.instance.name):
+        if event.name != self.instance.name:
             return

         if event.state in {ContainerState.FAILED, ContainerState.UNHEALTHY}:
@@ -25,12 +25,15 @@ class CheckBackups(CheckBase):

     async def approve_check(self, reference: str | None = None) -> bool:
         """Approve check if it is affected by issue."""
-        return 0 == len(
-            [
-                backup
-                for backup in self.sys_backups.list_backups
-                if backup.sys_type == BackupType.FULL and backup.is_current
-            ]
+        return (
+            len(
+                [
+                    backup
+                    for backup in self.sys_backups.list_backups
+                    if backup.sys_type == BackupType.FULL and backup.is_current
+                ]
+            )
+            == 0
         )

     @property
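This hunk, and the assert rewrites in the test files further down, apply SIM300 (Yoda conditions): the constant moves to the right-hand side of the comparison. Reduced to a line:

backups: list[str] = []

assert len(backups) == 0    # preferred ordering
# assert 0 == len(backups)  # Yoda condition, flagged by SIM300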
@@ -26,12 +26,15 @@ class CheckFreeSpace(CheckBase):
             return

         suggestions: list[SuggestionType] = []
-        if MINIMUM_FULL_BACKUPS < len(
-            [
-                backup
-                for backup in self.sys_backups.list_backups
-                if backup.sys_type == BackupType.FULL
-            ]
+        if (
+            len(
+                [
+                    backup
+                    for backup in self.sys_backups.list_backups
+                    if backup.sys_type == BackupType.FULL
+                ]
+            )
+            > MINIMUM_FULL_BACKUPS
         ):
             suggestions.append(SuggestionType.CLEAR_FULL_BACKUP)
@@ -28,10 +28,9 @@ class EvaluateBase(ABC, CoreSysAttributes):
                 self.on_failure,
                 self.reason,
             )
-        else:
-            if self.reason in self.sys_resolution.unsupported:
-                _LOGGER.info("Clearing %s as reason for unsupported", self.reason)
-                self.sys_resolution.dismiss_unsupported(self.reason)
+        elif self.reason in self.sys_resolution.unsupported:
+            _LOGGER.info("Clearing %s as reason for unsupported", self.reason)
+            self.sys_resolution.dismiss_unsupported(self.reason)

     @abstractmethod
     async def evaluate(self) -> bool:
@@ -31,10 +31,14 @@ class EvaluateDNSServer(EvaluateBase):

     async def evaluate(self) -> None:
         """Run evaluation."""
-        return not self.sys_plugins.dns.fallback and 0 < len(
-            [
-                issue
-                for issue in self.sys_resolution.issues
-                if issue.context == ContextType.DNS_SERVER
-            ]
+        return (
+            not self.sys_plugins.dns.fallback
+            and len(
+                [
+                    issue
+                    for issue in self.sys_resolution.issues
+                    if issue.context == ContextType.DNS_SERVER
+                ]
+            )
+            > 0
         )
@@ -23,7 +23,7 @@ class FixupSystemClearFullBackup(FixupBase):
             x for x in self.sys_backups.list_backups if x.sys_type == BackupType.FULL
         ]

-        if MINIMUM_FULL_BACKUPS >= len(full_backups):
+        if len(full_backups) <= MINIMUM_FULL_BACKUPS:
             return

         _LOGGER.info("Starting removal of old full backups")
@@ -1,5 +1,4 @@
-"""
-Helper to notify Core about issues.
+"""Helper to notify Core about issues.

 This helper creates persistent notification in the Core UI.
 In the future we want to remove this in favour of a "center" in the UI.
@@ -9,11 +9,10 @@ from pathlib import Path
 import shlex
 from typing import Final

-import async_timeout
 from dirhash import dirhash

-from . import clean_env
 from ..exceptions import CodeNotaryBackendError, CodeNotaryError, CodeNotaryUntrusted
+from . import clean_env

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -67,7 +66,7 @@ async def cas_validate(
             env=clean_env(),
         )

-        async with async_timeout.timeout(15):
+        async with asyncio.timeout(15):
             data, error = await proc.communicate()
     except TimeoutError:
         raise CodeNotaryBackendError(
@@ -1,4 +1,5 @@
 """Common utils."""
+from contextlib import suppress
 import logging
 from pathlib import Path
 from typing import Any
@@ -101,7 +102,5 @@ class FileConfiguration:
             self.read_data()
         else:
             # write
-            try:
+            with suppress(ConfigurationFileError):
                 write_json_or_yaml_file(self._file, self._data)
-            except ConfigurationFileError:
-                pass
@@ -110,7 +110,7 @@ class DBus:
             )
             return await getattr(proxy_interface, method)(*args)
         except DBusFastDBusError as err:
-            raise DBus.from_dbus_error(err)
+            raise DBus.from_dbus_error(err) from None
         except Exception as err:  # pylint: disable=broad-except
             capture_exception(err)
             raise DBusFatalError(str(err)) from err
@@ -134,7 +134,7 @@ class DBus:
                 f"Can't parse introspect data: {err}", _LOGGER.error
             ) from err
         except DBusFastDBusError as err:
-            raise DBus.from_dbus_error(err)
+            raise DBus.from_dbus_error(err) from None
         except (EOFError, TimeoutError):
             _LOGGER.warning(
                 "Busy system at %s - %s", self.bus_name, self.object_path
@@ -8,7 +8,7 @@ from yaml import YAMLError, dump, load
 try:
     from yaml import CDumper as Dumper, CSafeLoader as SafeLoader
 except ImportError:
-    from yaml import SafeLoader, Dumper
+    from yaml import Dumper, SafeLoader

 from ..exceptions import YamlFileError
@@ -96,6 +96,7 @@ async def test_api_addon_start_healthcheck(
     assert install_addon_ssh.state == AddonState.STOPPED

     state_changes: list[AddonState] = []
+    _container_events_task: asyncio.Task | None = None

     async def container_events():
         nonlocal state_changes
@@ -111,7 +112,8 @@ async def test_api_addon_start_healthcheck(
     )

     async def container_events_task(*args, **kwargs):
-        asyncio.create_task(container_events())
+        nonlocal _container_events_task
+        _container_events_task = asyncio.create_task(container_events())

     with patch.object(DockerAddon, "run", new=container_events_task):
         resp = await api_client.post("/addons/local_ssh/start")
@@ -137,6 +139,7 @@ async def test_api_addon_restart_healthcheck(
     assert install_addon_ssh.state == AddonState.STOPPED

     state_changes: list[AddonState] = []
+    _container_events_task: asyncio.Task | None = None

     async def container_events():
         nonlocal state_changes
@@ -152,7 +155,8 @@ async def test_api_addon_restart_healthcheck(
     )

     async def container_events_task(*args, **kwargs):
-        asyncio.create_task(container_events())
+        nonlocal _container_events_task
+        _container_events_task = asyncio.create_task(container_events())

     with patch.object(DockerAddon, "run", new=container_events_task):
         resp = await api_client.post("/addons/local_ssh/restart")
@@ -180,6 +184,7 @@ async def test_api_addon_rebuild_healthcheck(
     assert install_addon_ssh.state == AddonState.STARTUP

     state_changes: list[AddonState] = []
+    _container_events_task: asyncio.Task | None = None

     async def container_events():
         nonlocal state_changes
@@ -200,7 +205,8 @@ async def test_api_addon_rebuild_healthcheck(
     )

     async def container_events_task(*args, **kwargs):
-        asyncio.create_task(container_events())
+        nonlocal _container_events_task
+        _container_events_task = asyncio.create_task(container_events())

     with patch.object(
         AddonBuild, "is_valid", new=PropertyMock(return_value=True)
@@ -208,9 +214,7 @@ async def test_api_addon_rebuild_healthcheck(
         Addon, "need_build", new=PropertyMock(return_value=True)
     ), patch.object(
         CpuArch, "supported", new=PropertyMock(return_value=["amd64"])
-    ), patch.object(
-        DockerAddon, "run", new=container_events_task
-    ):
+    ), patch.object(DockerAddon, "run", new=container_events_task):
         resp = await api_client.post("/addons/local_ssh/rebuild")

     assert state_changes == [AddonState.STOPPED, AddonState.STARTUP]
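The recurring _container_events_task change above is the fix for RUF006 (enabled in pyproject.toml earlier in this diff): keep a reference to the task asyncio.create_task() returns so it cannot be garbage-collected mid-flight, and await it before the test finishes. Reduced to its essentials:

import asyncio

async def worker() -> None:
    await asyncio.sleep(0)

async def main() -> None:
    task = asyncio.create_task(worker())  # reference kept (RUF006)
    await task                            # awaited before moving on

asyncio.run(main())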
@@ -93,8 +93,8 @@ async def test_jobs_tree_representation(api_client: TestClient, coresys: CoreSys
         await self.event.wait()

     test = TestClass(coresys)
-    asyncio.create_task(test.test_jobs_tree_outer())
-    asyncio.create_task(test.test_jobs_tree_alt())
+    outer_task = asyncio.create_task(test.test_jobs_tree_outer())
+    alt_task = asyncio.create_task(test.test_jobs_tree_alt())
     await asyncio.sleep(0)

     resp = await api_client.get("/jobs/info")
@@ -150,6 +150,8 @@ async def test_jobs_tree_representation(api_client: TestClient, coresys: CoreSys
             "errors": [],
         },
     ]
+    await outer_task
+    await alt_task


 async def test_job_manual_cleanup(api_client: TestClient, coresys: CoreSys):
@@ -128,7 +128,7 @@ async def test_api_network_interface_update(
     )
     result = await resp.json()
     assert result["result"] == "ok"
-    assert network_manager_service.CheckConnectivity.calls == [tuple()]
+    assert network_manager_service.CheckConnectivity.calls == [()]
     assert len(connection_settings_service.Update.calls) == 1

     await connection_settings_service.ping()
@@ -221,7 +221,7 @@ async def test_api_network_reload(

     assert result["result"] == "ok"
     # Check that we forced NM to do an immediate connectivity check
-    assert network_manager_service.CheckConnectivity.calls == [tuple()]
+    assert network_manager_service.CheckConnectivity.calls == [()]


 async def test_api_network_vlan(
@@ -22,7 +22,7 @@ from tests.dbus_service_mocks.base import DBusServiceMock

 @pytest.fixture(name="boards_service")
 async def fixture_boards_service(
-    os_agent_services: dict[str, DBusServiceMock]
+    os_agent_services: dict[str, DBusServiceMock],
 ) -> BoardsService:
     """Return mock Boards service."""
     yield os_agent_services["agent_boards"]
@@ -35,9 +35,9 @@ async def test_api_resolution_base(coresys: CoreSys, api_client):
     result = await resp.json()
     assert UnsupportedReason.OS in result["data"][ATTR_UNSUPPORTED]
     assert (
-        SuggestionType.CLEAR_FULL_BACKUP == result["data"][ATTR_SUGGESTIONS][-1]["type"]
+        result["data"][ATTR_SUGGESTIONS][-1]["type"] == SuggestionType.CLEAR_FULL_BACKUP
     )
-    assert IssueType.FREE_SPACE == result["data"][ATTR_ISSUES][-1]["type"]
+    assert result["data"][ATTR_ISSUES][-1]["type"] == IssueType.FREE_SPACE


 @pytest.mark.asyncio
@@ -47,7 +47,7 @@ async def test_api_resolution_dismiss_suggestion(coresys: CoreSys, api_client):
         SuggestionType.CLEAR_FULL_BACKUP, ContextType.SYSTEM
     )

-    assert SuggestionType.CLEAR_FULL_BACKUP == coresys.resolution.suggestions[-1].type
+    assert coresys.resolution.suggestions[-1].type == SuggestionType.CLEAR_FULL_BACKUP
     await api_client.delete(f"/resolution/suggestion/{clear_backup.uuid}")
     assert clear_backup not in coresys.resolution.suggestions

@@ -87,7 +87,7 @@ async def test_api_resolution_dismiss_issue(coresys: CoreSys, api_client):
         IssueType.UPDATE_FAILED, ContextType.SYSTEM
     )

-    assert IssueType.UPDATE_FAILED == coresys.resolution.issues[-1].type
+    assert coresys.resolution.issues[-1].type == IssueType.UPDATE_FAILED
     await api_client.delete(f"/resolution/issue/{updated_failed.uuid}")
     assert updated_failed not in coresys.resolution.issues

@@ -99,7 +99,7 @@ async def test_api_resolution_unhealthy(coresys: CoreSys, api_client):

     resp = await api_client.get("/resolution/info")
     result = await resp.json()
-    assert UnhealthyReason.DOCKER == result["data"][ATTR_UNHEALTHY][-1]
+    assert result["data"][ATTR_UNHEALTHY][-1] == UnhealthyReason.DOCKER


 @pytest.mark.asyncio
@@ -45,10 +45,8 @@ async def test_api_store(
 @pytest.mark.asyncio
 async def test_api_store_addons(api_client: TestClient, store_addon: AddonStore):
     """Test /store/addons REST API."""
-    print("test")
     resp = await api_client.get("/store/addons")
     result = await resp.json()
-    print(result)

     assert result["data"]["addons"][-1]["slug"] == store_addon.slug

@@ -139,6 +137,7 @@ async def test_api_store_update_healthcheck(
     assert install_addon_ssh.need_update is True

     state_changes: list[AddonState] = []
+    _container_events_task: asyncio.Task | None = None

     async def container_events():
         nonlocal state_changes
@@ -174,7 +173,8 @@ async def test_api_store_update_healthcheck(
     )

     async def container_events_task(*args, **kwargs):
-        asyncio.create_task(container_events())
+        nonlocal _container_events_task
+        _container_events_task = asyncio.create_task(container_events())

     with patch.object(DockerAddon, "run", new=container_events_task), patch.object(
         DockerInterface, "install"
@@ -186,3 +186,5 @@ async def test_api_store_update_healthcheck(
     assert state_changes == [AddonState.STOPPED, AddonState.STARTUP]
     assert install_addon_ssh.state == AddonState.STARTED
     assert resp.status == 200
+
+    await _container_events_task
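The `test_api_store_update_healthcheck` hunks are a behavioral fix, not formatting: the `container_events_task` stub used to fire `asyncio.create_task()` and drop the handle, so the event-recording task could still be running, or be garbage-collected, while the test was asserting. Keeping the handle in a `nonlocal` and awaiting it at the end makes the test deterministic; the backup and restore healthcheck tests below get the identical treatment. A self-contained sketch of the pattern:

```python
import asyncio


async def run_test() -> None:
    events: list[str] = []
    _background_task: asyncio.Task | None = None

    async def background() -> None:
        events.append("state-change")

    async def stub(*args, **kwargs) -> None:
        # Keep a reference instead of a fire-and-forget create_task().
        nonlocal _background_task
        _background_task = asyncio.create_task(background())

    await stub()
    await _background_task  # the test now waits for the side effects
    assert events == ["state-change"]


asyncio.run(run_test())
```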
@@ -69,9 +69,7 @@ async def test_api_supervisor_options_repositories_skipped_on_error(
         "supervisor.store.repository.Repository.load", side_effect=git_error
     ), patch(
         "supervisor.store.repository.Repository.validate", return_value=False
-    ), patch(
-        "supervisor.store.repository.Repository.remove"
-    ):
+    ), patch("supervisor.store.repository.Repository.remove"):
         response = await api_client.post(
             "/supervisor/options", json={"addons_repositories": [REPO_URL]}
         )
@@ -806,6 +806,7 @@ async def test_backup_with_healthcheck(
     assert install_addon_ssh.state == AddonState.STARTUP

     state_changes: list[AddonState] = []
+    _container_events_task: asyncio.Task | None = None

     async def container_events():
         nonlocal state_changes
@@ -841,7 +842,8 @@ async def test_backup_with_healthcheck(
     )

     async def container_events_task(*args, **kwargs):
-        asyncio.create_task(container_events())
+        nonlocal _container_events_task
+        _container_events_task = asyncio.create_task(container_events())

     with patch.object(DockerAddon, "run", new=container_events_task), patch.object(
         AddonModel, "backup_mode", new=PropertyMock(return_value=AddonBackupMode.COLD)
@@ -855,6 +857,8 @@ async def test_backup_with_healthcheck(
     assert install_addon_ssh.state == AddonState.STARTED
     assert coresys.core.state == CoreState.RUNNING

+    await _container_events_task
+

 async def test_restore_with_healthcheck(
     coresys: CoreSys,
@@ -877,6 +881,7 @@ async def test_restore_with_healthcheck(
         homeassistant=False, addons=["local_ssh"]
     )
     state_changes: list[AddonState] = []
+    _container_events_task: asyncio.Task | None = None

     async def container_events():
         nonlocal state_changes
@@ -911,7 +916,8 @@ async def test_restore_with_healthcheck(
     )

     async def container_events_task(*args, **kwargs):
-        asyncio.create_task(container_events())
+        nonlocal _container_events_task
+        _container_events_task = asyncio.create_task(container_events())

     with patch.object(DockerAddon, "run", new=container_events_task), patch.object(
         DockerAddon, "is_running", return_value=False
@@ -924,6 +930,8 @@ async def test_restore_with_healthcheck(
     assert install_addon_ssh.state == AddonState.STARTED
     assert coresys.core.state == CoreState.RUNNING

+    await _container_events_task
+

 def _make_backup_message_for_assert(
     *,
@@ -1077,9 +1085,7 @@ async def test_restore_progress(
         HomeAssistant, "restore"
     ), patch.object(HomeAssistantCore, "update"), patch.object(
         AddonModel, "_validate_availability"
-    ), patch.object(
-        AddonModel, "with_ingress", new=PropertyMock(return_value=False)
-    ):
+    ), patch.object(AddonModel, "with_ingress", new=PropertyMock(return_value=False)):
         await coresys.backups.do_restore_full(full_backup)
         await asyncio.sleep(0)

@@ -1382,9 +1388,7 @@ async def test_restore_only_reloads_ingress_on_change(
         HomeAssistantCore, "is_running", new=mock_is_running
     ), patch.object(AddonModel, "_validate_availability"), patch.object(
         DockerAddon, "attach"
-    ), patch.object(
-        HomeAssistantAPI, "make_request"
-    ) as make_request:
+    ), patch.object(HomeAssistantAPI, "make_request") as make_request:
         make_request.return_value.__aenter__.return_value.status = 200

         # Has ingress before and after - not called
@@ -98,9 +98,7 @@ async def docker() -> DockerAPI:
     ), patch(
         "supervisor.docker.manager.DockerConfig",
         return_value=MagicMock(),
-    ), patch(
-        "supervisor.docker.manager.DockerAPI.unload"
-    ):
+    ), patch("supervisor.docker.manager.DockerAPI.unload"):
         docker_obj = DockerAPI(MagicMock())
         with patch("supervisor.docker.monitor.DockerMonitor.load"):
             await docker_obj.load()
@@ -184,7 +182,7 @@ async def network_manager(

 @pytest.fixture
 async def network_manager_service(
-    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]]
+    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
 ) -> NetworkManagerService:
     """Return Network Manager service mock."""
     yield network_manager_services["network_manager"]
@@ -192,7 +190,7 @@ async def network_manager_service(

 @pytest.fixture(name="connection_settings_service")
 async def fixture_connection_settings_service(
-    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]]
+    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
 ) -> ConnectionSettingsService:
     """Return mock connection settings service."""
     yield network_manager_services["network_connection_settings"]
@@ -276,19 +274,24 @@ async def fixture_all_dbus_services(
 ) -> dict[str, DBusServiceMock | dict[str, DBusServiceMock]]:
     """Mock all dbus services supervisor uses."""
     yield (
-        await mock_dbus_services(
-            {
-                "hostname": None,
-                "logind": None,
-                "rauc": None,
-                "resolved": None,
-                "systemd": None,
-                "systemd_unit": None,
-                "timedate": None,
-            },
-            dbus_session_bus,
+        (
+            await mock_dbus_services(
+                {
+                    "hostname": None,
+                    "logind": None,
+                    "rauc": None,
+                    "resolved": None,
+                    "systemd": None,
+                    "systemd_unit": None,
+                    "timedate": None,
+                },
+                dbus_session_bus,
+            )
         )
-    ) | network_manager_services | udisks2_services | os_agent_services
+        | network_manager_services
+        | udisks2_services
+        | os_agent_services
+    )


 @pytest.fixture
@@ -599,7 +602,7 @@ async def backups(
             ATTR_SLUG: slug,
             ATTR_DATE: utcnow().isoformat(),
             ATTR_TYPE: BackupType.PARTIAL
-            if "1" == slug[-1] or "5" == slug[-1]
+            if slug[-1] == "1" or slug[-1] == "5"
             else BackupType.FULL,
         }
         coresys.backups._backups[backup.slug] = backup
@@ -618,9 +621,7 @@ async def journald_logs(coresys: CoreSys) -> MagicMock:
         LogsControl,
         "get_identifiers",
         return_value=["hassio_supervisor", "hassos-config", "kernel"],
-    ), patch.object(
-        LogsControl, "journald_logs", new=MagicMock()
-    ) as logs:
+    ), patch.object(LogsControl, "journald_logs", new=MagicMock()) as logs:
         await coresys.host.logs.load()
         yield logs
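Several hunks above (the supervisor options test, the `docker` and `journald_logs` fixtures, and the restore tests) shrink only because the formatter now folds a short final `patch(...)` context manager onto the closing line where the previous Black formatting had split it across three. Behavior is unchanged; a small illustration with real stdlib targets:

```python
from unittest.mock import patch

# Previous layout:
# with patch("os.getcwd", return_value="/"), patch(
#     "os.cpu_count"
# ):
# New layout, when the trailing context manager fits on one line:
with patch("os.getcwd", return_value="/"), patch("os.cpu_count"):
    pass
```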
@@ -1,6 +1,5 @@
 """Test Green board."""
-# pylint: disable=import-error
-import asyncio
+
 from unittest.mock import patch

 from dbus_fast.aio.message_bus import MessageBus
@@ -22,12 +21,7 @@ async def fixture_green_service(dbus_session_bus: MessageBus) -> GreenService:

 async def test_dbus_green(green_service: GreenService, dbus_session_bus: MessageBus):
     """Test Green board load."""
-    with patch("supervisor.utils.common.Path.is_file", return_value=True), patch(
-        "supervisor.utils.common.read_json_file",
-        return_value={"activity_led": False, "user_led": False},
-    ):
-        green = Green()
-
+    green = Green()
     await green.connect(dbus_session_bus)

     assert green.name == "Green"
@@ -35,8 +29,12 @@ async def test_dbus_green(green_service: GreenService, dbus_session_bus: MessageBus):
     assert green.power_led is True
     assert green.user_led is True

-    await asyncio.sleep(0)
-    await green_service.ping()
+    with patch("supervisor.utils.common.Path.is_file", return_value=True), patch(
+        "supervisor.utils.common.read_json_file",
+        return_value={"activity_led": False, "user_led": False},
+    ):
+        green = Green()
+        await green.connect(dbus_session_bus)

     assert green.activity_led is False
     assert green.user_led is False
@@ -49,8 +47,7 @@ async def test_dbus_green_set_activity_led(
     green = Green()
     await green.connect(dbus_session_bus)

-    green.activity_led = False
-    await asyncio.sleep(0)  # Set property via dbus is separate async task
+    await green.set_activity_led(False)
     await green_service.ping()
     assert green.activity_led is False

@@ -62,8 +59,7 @@ async def test_dbus_green_set_power_led(
     green = Green()
     await green.connect(dbus_session_bus)

-    green.power_led = False
-    await asyncio.sleep(0)  # Set property via dbus is separate async task
+    await green.set_power_led(False)
     await green_service.ping()
     assert green.power_led is False

@@ -75,7 +71,6 @@ async def test_dbus_green_set_user_led(
     green = Green()
     await green.connect(dbus_session_bus)

-    green.user_led = False
-    await asyncio.sleep(0)  # Set property via dbus is separate async task
+    await green.set_user_led(False)
     await green_service.ping()
     assert green.user_led is False
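The Green board tests change for two reasons. First, per the commit description, D-Bus property sets became normal awaitable methods, so `green.activity_led = False` plus an `await asyncio.sleep(0)` to let the detached write task run is replaced by a single `await green.set_activity_led(False)`. Second, `test_dbus_green` no longer creates the board under the config-file patch up front; it connects a plain `Green()` first and only then re-creates it under the patch to check the overridden LED defaults, so the test no longer relies on tasks spawned during `connect()`. A hypothetical sketch of the interface change (the D-Bus plumbing is elided):

```python
class Green:
    """Sketch: property writes become explicit awaitable methods."""

    def __init__(self) -> None:
        self._activity_led = True

    @property
    def activity_led(self) -> bool:
        return self._activity_led

    async def set_activity_led(self, enabled: bool) -> None:
        # Previously a property setter that scheduled a separate async
        # task for the D-Bus write; now the caller awaits completion.
        self._activity_led = enabled  # stands in for the D-Bus Set call
```

The Yellow board tests below receive the same rewrite for `disk_led`, `heartbeat_led`, and `power_led`.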
@@ -1,6 +1,5 @@
 """Test Yellow board."""
-# pylint: disable=import-error
-import asyncio
+
 from unittest.mock import patch

 from dbus_fast.aio.message_bus import MessageBus
@@ -22,11 +21,7 @@ async def fixture_yellow_service(dbus_session_bus: MessageBus) -> YellowService:

 async def test_dbus_yellow(yellow_service: YellowService, dbus_session_bus: MessageBus):
     """Test Yellow board load."""
-    with patch("supervisor.utils.common.Path.is_file", return_value=True), patch(
-        "supervisor.utils.common.read_json_file",
-        return_value={"disk_led": False, "heartbeat_led": False},
-    ):
-        yellow = Yellow()
+    yellow = Yellow()
     await yellow.connect(dbus_session_bus)

     assert yellow.name == "Yellow"
@@ -34,8 +29,12 @@ async def test_dbus_yellow(yellow_service: YellowService, dbus_session_bus: MessageBus):
     assert yellow.heartbeat_led is True
     assert yellow.power_led is True

-    await asyncio.sleep(0)
-    await yellow_service.ping()
+    with patch("supervisor.utils.common.Path.is_file", return_value=True), patch(
+        "supervisor.utils.common.read_json_file",
+        return_value={"disk_led": False, "heartbeat_led": False},
+    ):
+        yellow = Yellow()
+        await yellow.connect(dbus_session_bus)

     assert yellow.disk_led is False
     assert yellow.heartbeat_led is False
@@ -48,8 +47,7 @@ async def test_dbus_yellow_set_disk_led(
     yellow = Yellow()
     await yellow.connect(dbus_session_bus)

-    yellow.disk_led = False
-    await asyncio.sleep(0)  # Set property via dbus is separate async task
+    await yellow.set_disk_led(False)
     await yellow_service.ping()
     assert yellow.disk_led is False

@@ -61,8 +59,7 @@ async def test_dbus_yellow_set_heartbeat_led(
     yellow = Yellow()
     await yellow.connect(dbus_session_bus)

-    yellow.heartbeat_led = False
-    await asyncio.sleep(0)  # Set property via dbus is separate async task
+    await yellow.set_heartbeat_led(False)
     await yellow_service.ping()
     assert yellow.heartbeat_led is False

@@ -74,7 +71,6 @@ async def test_dbus_yellow_set_power_led(
     yellow = Yellow()
     await yellow.connect(dbus_session_bus)

-    yellow.power_led = False
-    await asyncio.sleep(0)  # Set property via dbus is separate async task
+    await yellow.set_power_led(False)
     await yellow_service.ping()
     assert yellow.power_led is False
@@ -12,7 +12,7 @@ from tests.dbus_service_mocks.os_agent import OSAgent as OSAgentService

 @pytest.fixture(name="os_agent_service")
 async def fixture_os_agent_service(
-    os_agent_services: dict[str, DBusServiceMock]
+    os_agent_services: dict[str, DBusServiceMock],
 ) -> OSAgentService:
     """Mock OS Agent dbus service."""
     yield os_agent_services["os_agent"]
@@ -14,7 +14,7 @@ from tests.dbus_service_mocks.base import DBusServiceMock

 @pytest.fixture(name="apparmor_service", autouse=True)
 async def fixture_apparmor_service(
-    os_agent_services: dict[str, DBusServiceMock]
+    os_agent_services: dict[str, DBusServiceMock],
 ) -> AppArmorService:
     """Mock AppArmor dbus service."""
     yield os_agent_services["agent_apparmor"]
@@ -12,7 +12,7 @@ from tests.dbus_service_mocks.base import DBusServiceMock

 @pytest.fixture(name="cgroup_service", autouse=True)
 async def fixture_cgroup_service(
-    os_agent_services: dict[str, DBusServiceMock]
+    os_agent_services: dict[str, DBusServiceMock],
 ) -> CGroupService:
     """Mock CGroup dbus service."""
     yield os_agent_services["agent_cgroup"]
@@ -1,5 +1,5 @@
 """Test Datadisk/Agent dbus interface."""
-# pylint: disable=import-error
+
 from pathlib import Path

 from dbus_fast.aio.message_bus import MessageBus
@@ -14,7 +14,7 @@ from tests.dbus_service_mocks.base import DBusServiceMock

 @pytest.fixture(name="datadisk_service", autouse=True)
 async def fixture_datadisk_service(
-    os_agent_services: dict[str, DBusServiceMock]
+    os_agent_services: dict[str, DBusServiceMock],
 ) -> DataDiskService:
     """Mock DataDisk dbus service."""
     yield os_agent_services["agent_datadisk"]
@@ -71,7 +71,7 @@ async def test_dbus_osagent_datadisk_reload_device(
     await os_agent.connect(dbus_session_bus)

     assert await os_agent.datadisk.reload_device() is None
-    assert datadisk_service.ReloadDevice.calls == [tuple()]
+    assert datadisk_service.ReloadDevice.calls == [()]


 async def test_dbus_osagent_datadisk_mark_data_move(
@@ -87,4 +87,4 @@ async def test_dbus_osagent_datadisk_mark_data_move(
     await os_agent.connect(dbus_session_bus)

     assert await os_agent.datadisk.mark_data_move() is None
-    assert datadisk_service.MarkDataMove.calls == [tuple()]
+    assert datadisk_service.MarkDataMove.calls == [()]
@@ -1,5 +1,5 @@
 """Test System/Agent dbus interface."""
-# pylint: disable=import-error
+
 from dbus_fast.aio.message_bus import MessageBus
 import pytest

@@ -12,7 +12,7 @@ from tests.dbus_service_mocks.base import DBusServiceMock

 @pytest.fixture(name="system_service", autouse=True)
 async def fixture_system_service(
-    os_agent_services: dict[str, DBusServiceMock]
+    os_agent_services: dict[str, DBusServiceMock],
 ) -> SystemService:
     """Mock System dbus service."""
     yield os_agent_services["agent_system"]
@@ -31,4 +31,4 @@ async def test_dbus_osagent_system_wipe(
     await os_agent.connect(dbus_session_bus)

     assert await os_agent.system.schedule_wipe_device() is None
-    assert system_service.ScheduleWipeDevice.calls == [tuple()]
+    assert system_service.ScheduleWipeDevice.calls == [()]
@@ -2,8 +2,8 @@

 from unittest.mock import MagicMock

-from dbus_fast import Variant
 from dbus_fast.aio.message_bus import MessageBus
+from dbus_fast.signature import Variant
 import pytest

 from supervisor.dbus.network.interface import NetworkInterface
@@ -20,7 +20,7 @@ from tests.dbus_service_mocks.network_connection_settings import (

 @pytest.fixture(name="connection_settings_service", autouse=True)
 async def fixture_connection_settings_service(
-    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]]
+    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
 ) -> ConnectionSettingsService:
     """Mock Connection Settings service."""
     yield network_manager_services["network_connection_settings"]
@@ -125,9 +125,9 @@ async def test_watching_updated_signal(
     settings = NetworkSetting("/org/freedesktop/NetworkManager/Settings/1")
     await settings.connect(dbus_session_bus)

-    connection_settings_service.GetSettings.calls == [tuple()]
+    assert connection_settings_service.GetSettings.calls == [()]

     connection_settings_service.Updated()
     await connection_settings_service.ping()
     await connection_settings_service.ping()
-    assert connection_settings_service.GetSettings.calls == [tuple(), tuple()]
+    assert connection_settings_service.GetSettings.calls == [(), ()]
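One line in this file was a genuine bug surfaced by the migration: `connection_settings_service.GetSettings.calls == [tuple()]` computed a comparison and discarded the result, so nothing was ever checked (the pattern Ruff reports as flake8-bugbear B015, a pointless comparison). The fix adds the missing `assert` alongside the usual `tuple()` → `()` cleanup:

```python
calls: list[tuple] = [()]

calls == [()]  # noqa: B015 - result discarded, the "assertion" never runs
assert calls == [()]  # now a mismatch actually fails the test
```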
@@ -16,7 +16,7 @@ from tests.dbus_service_mocks.network_active_connection import (

 @pytest.fixture(name="active_connection_service", autouse=True)
 async def fixture_active_connection_service(
-    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]]
+    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
 ) -> ActiveConnectionService:
     """Mock Active Connection service."""
     yield network_manager_services["network_active_connection"]
@@ -17,7 +17,7 @@ from tests.dbus_service_mocks.network_device import Device as DeviceService

 @pytest.fixture(name="device_eth0_service")
 async def fixture_device_eth0_service(
-    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]]
+    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
 ) -> DeviceService:
     """Mock Device eth0 service."""
     yield network_manager_services["network_device"][
@@ -27,7 +27,7 @@ async def fixture_device_eth0_service(

 @pytest.fixture(name="device_wlan0_service")
 async def fixture_device_wlan0_service(
-    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]]
+    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
 ) -> DeviceService:
     """Mock Device wlan0 service."""
     yield network_manager_services["network_device"][
@@ -60,6 +60,7 @@ async def test_network_manager_version(
     network_manager_service: NetworkManagerService, network_manager: NetworkManager
 ):
     """Test if version validate work."""
+    # pylint: disable=protected-access
     await network_manager._validate_version()
     assert network_manager.version == "1.22.10"

@@ -67,6 +68,7 @@ async def test_network_manager_version(
     with pytest.raises(HostNotSupportedError):
         await network_manager._validate_version()
     assert network_manager.version == "1.13.9"
+    # pylint: enable=protected-access


 async def test_check_connectivity(
@@ -79,7 +81,7 @@ async def test_check_connectivity(
     assert network_manager_service.CheckConnectivity.calls == []

     assert await network_manager.check_connectivity(force=True) == 4
-    assert network_manager_service.CheckConnectivity.calls == [tuple()]
+    assert network_manager_service.CheckConnectivity.calls == [()]


 async def test_activate_connection(
@@ -54,7 +54,7 @@ async def test_reload_connections(
     await settings.connect(dbus_session_bus)

     assert await settings.reload_connections() is True
-    assert settings_service.ReloadConnections.calls == [tuple()]
+    assert settings_service.ReloadConnections.calls == [()]


 async def test_dbus_network_settings_connect_error(
@@ -15,7 +15,7 @@ from tests.dbus_service_mocks.network_device_wireless import (

 @pytest.fixture(name="device_wireless_service", autouse=True)
 async def fixture_device_wireless_service(
-    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]]
+    network_manager_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
 ) -> DeviceWirelessService:
     """Mock Device Wireless service."""
     yield network_manager_services["network_device_wireless"]
@@ -42,7 +42,7 @@ async def test_reboot(systemd_service: SystemdService, dbus_session_bus: MessageBus):
     await systemd.connect(dbus_session_bus)

     assert await systemd.reboot() is None
-    assert systemd_service.Reboot.calls == [tuple()]
+    assert systemd_service.Reboot.calls == [()]


 async def test_power_off(systemd_service: SystemdService, dbus_session_bus: MessageBus):
@@ -56,7 +56,7 @@ async def test_power_off(systemd_service: SystemdService, dbus_session_bus: MessageBus):
     await systemd.connect(dbus_session_bus)

     assert await systemd.power_off() is None
-    assert systemd_service.PowerOff.calls == [tuple()]
+    assert systemd_service.PowerOff.calls == [()]


 async def test_start_unit(
@@ -1,6 +1,6 @@
 """Test TimeDate dbus interface."""
-# pylint: disable=import-error
-from datetime import datetime, timezone
+
+from datetime import UTC, datetime

 from dbus_fast.aio.message_bus import MessageBus
 import pytest
@@ -29,9 +29,7 @@ async def test_timedate_info(

     await timedate.connect(dbus_session_bus)

-    assert timedate.dt_utc == datetime(
-        2021, 5, 19, 8, 36, 54, 405718, tzinfo=timezone.utc
-    )
+    assert timedate.dt_utc == datetime(2021, 5, 19, 8, 36, 54, 405718, tzinfo=UTC)
     assert timedate.ntp is True

     assert timedate.dt_utc.isoformat() == "2021-05-19T08:36:54.405718+00:00"
@@ -53,7 +51,7 @@ async def test_dbus_settime(
     timedate_service.SetTime.calls.clear()
     timedate = TimeDate()

-    test_dt = datetime(2021, 5, 19, 8, 36, 54, 405718, tzinfo=timezone.utc)
+    test_dt = datetime(2021, 5, 19, 8, 36, 54, 405718, tzinfo=UTC)

     with pytest.raises(DBusNotConnectedError):
         await timedate.set_time(test_dt)
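`timezone.utc` gives way to `UTC`, the alias `datetime` gained in Python 3.11 (Ruff's pyupgrade rule UP017); the same substitution appears in the UDisks2 drive test below. The two names refer to the same object, so nothing changes at runtime:

```python
from datetime import UTC, datetime, timezone

assert UTC is timezone.utc  # UTC is merely the 3.11+ alias

dt = datetime(2021, 5, 19, 8, 36, 54, 405718, tzinfo=UTC)
assert dt.isoformat() == "2021-05-19T08:36:54.405718+00:00"
```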
@@ -22,7 +22,7 @@ from tests.dbus_service_mocks.udisks2_block import Block as BlockService

 @pytest.fixture(name="block_sda_service")
 async def fixture_block_sda_service(
-    udisks2_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]]
+    udisks2_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
 ) -> BlockService:
     """Mock sda Block service."""
     yield udisks2_services["udisks2_block"][
@@ -32,7 +32,7 @@ async def fixture_block_sda_service(

 @pytest.fixture(name="block_sda1_service")
 async def fixture_block_sda1_service(
-    udisks2_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]]
+    udisks2_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
 ) -> BlockService:
     """Mock sda1 Block service."""
     yield udisks2_services["udisks2_block"][
@@ -1,6 +1,6 @@
 """Test UDisks2 Drive."""

-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from dbus_fast import Variant
 from dbus_fast.aio.message_bus import MessageBus
@@ -59,7 +59,7 @@ async def test_drive_info(
     assert ssk.vendor == "SSK"
     assert ssk.model == "SSK Storage"
     assert ssk.size == 250059350016
-    assert ssk.time_detected == datetime(2023, 2, 8, 23, 1, 44, 240492, timezone.utc)
+    assert ssk.time_detected == datetime(2023, 2, 8, 23, 1, 44, 240492, UTC)
     assert ssk.ejectable is False

     drive_ssk_storage_service.emit_properties_changed({"Ejectable": True})
@@ -21,7 +21,7 @@ from tests.dbus_service_mocks.udisks2_manager import (

 @pytest.fixture(name="udisks2_manager_service")
 async def fixture_udisks2_manager_service(
-    udisks2_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]]
+    udisks2_services: dict[str, DBusServiceMock | dict[str, DBusServiceMock]],
 ) -> UDisks2ManagerService:
     """Mock UDisks2 Manager service."""
     yield udisks2_services["udisks2_manager"]
@@ -76,7 +76,9 @@ async def test_udisks2_manager_info(
     udisks2_manager_service.emit_properties_changed({}, ["SupportedFilesystems"])
     await udisks2_manager_service.ping()
     await udisks2_manager_service.ping()
-    await udisks2_manager_service.ping()  # Three pings: signal, get all properties and get block devices
+    await (
+        udisks2_manager_service.ping()
+    )  # Three pings: signal, get all properties and get block devices
     assert udisks2.supported_filesystems == [
         "ext4",
         "vfat",
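The odd-looking `await (...)` in the last hunk appears to be the formatter's way of keeping a long trailing comment attached to a call that would otherwise overflow the line limit: the awaited expression is parenthesized and the comment rides on the closing line. A runnable illustration with a stand-in service:

```python
import asyncio


class Service:
    async def ping(self) -> None:
        """Stand-in for the D-Bus mock's ping()."""


async def main() -> None:
    service = Service()
    # The formatter wraps the awaited call so the explanatory comment fits.
    await (
        service.ping()
    )  # Three pings: signal, get all properties and get block devices


asyncio.run(main())
```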
@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return AppArmor()


-# pylint: disable=invalid-name
-
-
 class AppArmor(DBusServiceMock):
     """AppArmor mock.

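Every module under `tests/dbus_service_mocks/` below loses the same three lines: the file-level `# pylint: disable=invalid-name` (needed because the mocks mirror D-Bus method and property names such as `CheckConnectivity`) is dropped, presumably in favor of shared lint configuration. Where a suppression is still required, it is scoped to the single offending line instead, as the Systemd mock further down does for `raising-bad-type`. A sketch of that narrower style (the attribute's union type is the assumption that trips pylint):

```python
class Systemd:
    """Sketch: line-level suppression instead of a module-wide disable."""

    # May hold a D-Bus object path (str) or an error to raise.
    response_stop_unit: Exception | str = "/org/freedesktop/systemd1/job/1"

    def StopUnit(self, name: str, mode: str) -> str:  # D-Bus-style name
        if isinstance(self.response_stop_unit, Exception):
            raise self.response_stop_unit  # pylint: disable=raising-bad-type
        return self.response_stop_unit
```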
@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Boards()


-# pylint: disable=invalid-name
-
-
 class Boards(DBusServiceMock):
     """Boards mock.

@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Green()


-# pylint: disable=invalid-name
-
-
 class Green(DBusServiceMock):
     """Green mock.

@@ -10,9 +10,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Supervised()


-# pylint: disable=invalid-name
-
-
 class Supervised(DBusServiceMock):
     """Supervised mock.

@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Yellow()


-# pylint: disable=invalid-name
-
-
 class Yellow(DBusServiceMock):
     """Yellow mock.

@@ -10,9 +10,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return CGroup()


-# pylint: disable=invalid-name
-
-
 class CGroup(DBusServiceMock):
     """CGroup mock.

@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return DataDisk()


-# pylint: disable=invalid-name
-
-
 class DataDisk(DBusServiceMock):
     """DataDisk mock.

@@ -10,9 +10,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return System()


-# pylint: disable=invalid-name
-
-
 class System(DBusServiceMock):
     """System mock.

@@ -14,9 +14,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Hostname()


-# pylint: disable=invalid-name
-
-
 class Hostname(DBusServiceMock):
     """Hostname mock.

@@ -10,9 +10,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Logind()


-# pylint: disable=invalid-name,missing-function-docstring
-
-
 class Logind(DBusServiceMock):
     """Logind mock.

@@ -16,9 +16,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return AccessPoint(object_path if object_path else DEFAULT_OBJECT_PATH)


-# pylint: disable=invalid-name
-
-
 @dataclass(slots=True)
 class AccessPointFixture:
     """Access Point fixture."""

@@ -15,9 +15,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return ActiveConnection(object_path if object_path else DEFAULT_OBJECT_PATH)


-# pylint: disable=invalid-name
-
-
 @dataclass(slots=True)
 class ActiveConnectionFixture:
     """Active Connection fixture."""

@@ -87,9 +87,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return ConnectionSettings(object_path if object_path else DEFAULT_OBJECT_PATH)


-# pylint: disable=invalid-name
-
-
 class ConnectionSettings(DBusServiceMock):
     """Connection Settings mock.

@@ -17,9 +17,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Device(object_path if object_path else DEFAULT_OBJECT_PATH)


-# pylint: disable=invalid-name
-
-
 @dataclass(slots=True)
 class DeviceFixture:
     """Device fixture."""

@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return DeviceWireless()


-# pylint: disable=invalid-name
-
-
 class DeviceWireless(DBusServiceMock):
     """Device Wireless mock.

@@ -13,9 +13,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return DnsManager()


-# pylint: disable=invalid-name
-
-
 class DnsManager(DBusServiceMock):
     """DNS Manager mock.

@@ -13,9 +13,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return IP4Config()


-# pylint: disable=invalid-name
-
-
 class IP4Config(DBusServiceMock):
     """IP4Config mock.

@@ -13,9 +13,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return IP6Config()


-# pylint: disable=invalid-name
-
-
 class IP6Config(DBusServiceMock):
     """IP6Config mock.

@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return NetworkManager()


-# pylint: disable=invalid-name
-
-
 class NetworkManager(DBusServiceMock):
     """Network Manager mock.

@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Settings()


-# pylint: disable=invalid-name
-
-
 class Settings(DBusServiceMock):
     """Settings mock.

@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return OSAgent()


-# pylint: disable=invalid-name
-
-
 class OSAgent(DBusServiceMock):
     """OS-agent mock.

@@ -13,9 +13,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Rauc()


-# pylint: disable=invalid-name
-
-
 class Rauc(DBusServiceMock):
     """Rauc mock.

@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Resolved()


-# pylint: disable=invalid-name
-
-
 class Resolved(DBusServiceMock):
     """Resolved mock.

@@ -14,9 +14,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Systemd()


-# pylint: disable=invalid-name,missing-function-docstring,raising-bad-type
-
-
 class Systemd(DBusServiceMock):
     """Systemd mock.

@@ -668,7 +665,7 @@ class Systemd(DBusServiceMock):
     def StopUnit(self, name: "s", mode: "s") -> "o":
         """Stop a service unit."""
         if isinstance(self.response_stop_unit, DBusError):
-            raise self.response_stop_unit
+            raise self.response_stop_unit  # pylint: disable=raising-bad-type
         if self.mock_systemd_unit:
             self.mock_systemd_unit.active_state = "inactive"
         return self.response_stop_unit
@@ -677,7 +674,7 @@ class Systemd(DBusServiceMock):
     def ReloadOrRestartUnit(self, name: "s", mode: "s") -> "o":
         """Reload or restart a service unit."""
         if isinstance(self.response_reload_or_restart_unit, DBusError):
-            raise self.response_reload_or_restart_unit
+            raise self.response_reload_or_restart_unit  # pylint: disable=raising-bad-type
         if self.mock_systemd_unit:
             self.mock_systemd_unit.active_state = "active"
         return self.response_reload_or_restart_unit
@@ -695,7 +692,7 @@ class Systemd(DBusServiceMock):
     ) -> "o":
         """Start a transient service unit."""
         if isinstance(self.response_start_transient_unit, DBusError):
-            raise self.response_start_transient_unit
+            raise self.response_start_transient_unit  # pylint: disable=raising-bad-type
         if self.mock_systemd_unit:
             self.mock_systemd_unit.active_state = "active"
         return self.response_start_transient_unit
@@ -13,9 +13,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return SystemdUnit(object_path or DEFAULT_OBJECT_PATH)


-# pylint: disable=invalid-name,missing-function-docstring
-
-
 class SystemdUnit(DBusServiceMock):
     """Systemd Unit mock.

@@ -196,7 +193,7 @@ class SystemdUnit(DBusServiceMock):
     @dbus_property(access=PropertyAccess.READ)
     def RequiresMountsFor(self) -> "as":
         """Get RequiresMountsFor."""
-        return ["/tmp"]
+        return ["/tmp"]  # noqa: S108

     @dbus_property(access=PropertyAccess.READ)
     def Documentation(self) -> "as":
@@ -206,7 +203,7 @@ class SystemdUnit(DBusServiceMock):
     @dbus_property(access=PropertyAccess.READ)
     def Description(self) -> "s":
         """Get Description."""
-        return "/tmp/yellow"
+        return "/tmp/yellow"  # noqa: S108

     @dbus_property(access=PropertyAccess.READ)
     def AccessSELinuxContext(self) -> "s":
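The new `# noqa: S108` markers in the SystemdUnit mock silence flake8-bandit's hardcoded-temporary-directory check: `/tmp` here is fixture data returned by a mocked D-Bus property, not a real temporary path, so an inline suppression is the right scope:

```python
def requires_mounts_for() -> list[str]:
    """Mocked unit property; /tmp is fixture data, not a real tempdir."""
    return ["/tmp"]  # noqa: S108 - intentional hardcoded path in a test mock
```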
@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return TimeDate()


-# pylint: disable=invalid-name
-
-
 class TimeDate(DBusServiceMock):
     """TimeDate mock.

@@ -17,9 +17,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Block(object_path if object_path else DEFAULT_OBJECT_PATH)


-# pylint: disable=invalid-name
-
-
 @dataclass(slots=True)
 class BlockFixture:
     """Block fixture."""

@@ -17,9 +17,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Drive(object_path if object_path else DEFAULT_OBJECT_PATH)


-# pylint: disable=invalid-name
-
-
 @dataclass(slots=True)
 class DriveFixture:
     """Drive fixture."""

@@ -16,9 +16,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Filesystem(object_path if object_path else DEFAULT_OBJECT_PATH)


-# pylint: disable=invalid-name
-
-
 @dataclass(slots=True)
 class FilesystemFixture:
     """Filesystem fixture."""

@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Loop()


-# pylint: disable=invalid-name
-
-
 class Loop(DBusServiceMock):
     """Loop mock.

@@ -12,9 +12,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return UDisks2Manager()


-# pylint: disable=invalid-name
-
-
 class UDisks2Manager(DBusServiceMock):
     """UDisks2 Manager mock.

@@ -16,9 +16,6 @@ def setup(object_path: str | None = None) -> DBusServiceMock:
     return Partition(object_path if object_path else DEFAULT_OBJECT_PATH)


-# pylint: disable=invalid-name
-
-
 @dataclass(slots=True)
 class PartitionFixture:
     """Partition fixture."""