Compare commits


1 commit

Author: J. Nick Koston
SHA1: fb1c2467b9
Message: [api] Optimize varint encoding performance for Bluetooth proxy efficiency
Date: 2025-08-17 23:55:17 -05:00

3863 changed files with 22454 additions and 53829 deletions

View File

@@ -9,7 +9,7 @@ This document provides essential context for AI models interacting with this pro
## 2. Core Technologies & Stack
* **Languages:** Python (>=3.11), C++ (gnu++20)
* **Languages:** Python (>=3.10), C++ (gnu++20)
* **Frameworks & Runtimes:** PlatformIO, Arduino, ESP-IDF.
* **Build Systems:** PlatformIO is the primary build system. CMake is used as an alternative.
* **Configuration:** YAML.
@@ -38,7 +38,7 @@ This document provides essential context for AI models interacting with this pro
5. **Dashboard** (`esphome/dashboard/`): A web-based interface for device configuration, management, and OTA updates.
* **Platform Support:**
1. **ESP32** (`components/esp32/`): Espressif ESP32 family. Supports multiple variants (Original, C2, C3, C5, C6, H2, P4, S2, S3) with ESP-IDF framework. Arduino framework supports only a subset of the variants (Original, C3, S2, S3).
1. **ESP32** (`components/esp32/`): Espressif ESP32 family. Supports multiple variants (S2, S3, C3, etc.) and both IDF and Arduino frameworks.
2. **ESP8266** (`components/esp8266/`): Espressif ESP8266. Arduino framework only, with memory constraints.
3. **RP2040** (`components/rp2040/`): Raspberry Pi Pico/RP2040. Arduino framework with PIO (Programmable I/O) support.
4. **LibreTiny** (`components/libretiny/`): Realtek and Beken chips. Supports multiple chip families and auto-generated components.
@@ -60,7 +60,7 @@ This document provides essential context for AI models interacting with this pro
├── __init__.py # Component configuration schema and code generation
├── [component].h # C++ header file (if needed)
├── [component].cpp # C++ implementation (if needed)
└── [platform]/ # Platform-specific implementations
└── [platform]/ # Platform-specific implementations
├── __init__.py # Platform-specific configuration
├── [platform].h # Platform C++ header
└── [platform].cpp # Platform C++ implementation
@@ -150,8 +150,7 @@ This document provides essential context for AI models interacting with this pro
* **Configuration Validation:**
* **Common Validators:** `cv.int_`, `cv.float_`, `cv.string`, `cv.boolean`, `cv.int_range(min=0, max=100)`, `cv.positive_int`, `cv.percentage`.
* **Complex Validation:** `cv.All(cv.string, cv.Length(min=1, max=50))`, `cv.Any(cv.int_, cv.string)`.
* **Platform-Specific:** `cv.only_on(["esp32", "esp8266"])`, `esp32.only_on_variant(...)`, `cv.only_on_esp32`, `cv.only_on_esp8266`, `cv.only_on_rp2040`.
* **Framework-Specific:** `cv.only_with_framework(...)`, `cv.only_with_arduino`, `cv.only_with_esp_idf`.
* **Platform-Specific:** `cv.only_on(["esp32", "esp8266"])`, `cv.only_with_arduino`.
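For illustration, a minimal sketch (not from this document) of how the validators above might compose in a component schema; the `channel`, `label`, and `invert` keys are hypothetical:
```python
# Sketch only: combining common, complex, and platform-specific validators.
import esphome.config_validation as cv

CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.Required("channel"): cv.int_range(min=0, max=100),
            cv.Optional("label", default="sensor"): cv.All(
                cv.string, cv.Length(min=1, max=50)
            ),
            cv.Optional("invert", default=False): cv.boolean,
        }
    ),
    cv.only_on(["esp32", "esp8266"]),  # restrict to the listed platforms
)
```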
* **Schema Extensions:**
```python
CONFIG_SCHEMA = cv.Schema({ ... })
@@ -186,11 +185,6 @@ This document provides essential context for AI models interacting with this pro
└── components/[component]/ # Component-specific tests
```
Run them using `script/test_build_components`. Use `-c <component>` to test specific components and `-t <target>` for specific platforms.
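A hedged example of the flags described above (the component and target names are placeholders):
```bash
# Validate the config, then compile, for a single component on one platform.
./script/test_build_components -e config -c wifi -t esp32-idf
./script/test_build_components -e compile -c wifi -t esp32-idf
```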
* **Testing All Components Together:** To verify that all components can be tested together without ID conflicts or configuration issues, use:
```bash
./script/test_component_grouping.py -e config --all
```
This tests all components in a single build to catch conflicts that might not appear when testing components individually. Use `-e config` for fast configuration validation, or `-e compile` for full compilation testing.
* **Debugging and Troubleshooting:**
* **Debug Tools:**
- `esphome config <file>.yaml` to validate configuration.
@@ -221,146 +215,6 @@ This document provides essential context for AI models interacting with this pro
* **Component Development:** Keep dependencies minimal, provide clear error messages, and write comprehensive docstrings and tests.
* **Code Generation:** Generate minimal and efficient C++ code. Validate all user inputs thoroughly. Support multiple platform variations.
* **Configuration Design:** Aim for simplicity with sensible defaults, while allowing for advanced customization.
* **Embedded Systems Optimization:** ESPHome targets resource-constrained microcontrollers. Be mindful of flash size and RAM usage.
**STL Container Guidelines:**
ESPHome runs on embedded systems with limited resources. Choose containers carefully:
1. **Compile-time-known sizes:** Use `std::array` instead of `std::vector` when size is known at compile time.
```cpp
// Bad - generates STL realloc code
std::vector<int> values;
// Good - no dynamic allocation
std::array<int, MAX_VALUES> values;
```
Use `cg.add_define("MAX_VALUES", count)` to set the size from Python configuration.
**For byte buffers:** Avoid `std::vector<uint8_t>` unless the buffer needs to grow. Use `std::unique_ptr<uint8_t[]>` instead.
> **Note:** `std::unique_ptr<uint8_t[]>` does **not** provide bounds checking or iterator support like `std::vector<uint8_t>`. Use it only when you do not need these features and want minimal overhead.
```cpp
// Bad - STL overhead for simple byte buffer
std::vector<uint8_t> buffer;
buffer.resize(256);
// Good - minimal overhead, single allocation
std::unique_ptr<uint8_t[]> buffer = std::make_unique<uint8_t[]>(256);
// Or if size is constant:
std::array<uint8_t, 256> buffer;
```
2. **Compile-time-known fixed sizes with vector-like API:** Use `StaticVector` from `esphome/core/helpers.h` for fixed-size stack allocation with `push_back()` interface.
```cpp
// Bad - generates STL realloc code (_M_realloc_insert)
std::vector<ServiceRecord> services;
services.reserve(5); // Still includes reallocation machinery
// Good - compile-time fixed size, stack allocated, no reallocation machinery
StaticVector<ServiceRecord, MAX_SERVICES> services; // Allocates all MAX_SERVICES on stack
services.push_back(record1); // Tracks count but all slots allocated
```
Use `cg.add_define("MAX_SERVICES", count)` to set the size from Python configuration.
Like `std::array` but with vector-like API (`push_back()`, `size()`) and no STL reallocation code.
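As a hedged sketch of the `cg.add_define(...)` calls mentioned above, a component's Python side might derive the define from the validated config (the `services` key is hypothetical):
```python
# Sketch only: set MAX_SERVICES at code-generation time from the user's YAML.
import esphome.codegen as cg

async def to_code(config):
    services = config.get("services", [])
    cg.add_define("MAX_SERVICES", len(services))
```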
3. **Runtime-known sizes:** Use `FixedVector` from `esphome/core/helpers.h` when the size is only known at runtime initialization.
```cpp
// Bad - generates STL realloc code (_M_realloc_insert)
std::vector<TxtRecord> txt_records;
txt_records.reserve(5); // Still includes reallocation machinery
// Good - runtime size, single allocation, no reallocation machinery
FixedVector<TxtRecord> txt_records;
txt_records.init(record_count); // Initialize with exact size at runtime
```
**Benefits:**
- Eliminates `_M_realloc_insert`, `_M_default_append` template instantiations (saves 200-500 bytes per instance)
- Single allocation, no upper bound needed
- No reallocation overhead
- Compatible with protobuf code generation when using `[(fixed_vector) = true]` option
4. **Small datasets (1-16 elements):** Use `std::vector` or `std::array` with simple structs instead of `std::map`/`std::set`/`std::unordered_map`.
```cpp
// Bad - 2KB+ overhead for red-black tree/hash table
std::map<std::string, int> small_lookup;
std::unordered_map<int, std::string> tiny_map;
// Good - simple struct with linear search (std::vector is fine)
struct LookupEntry {
  const char *key;
  int value;
};
std::vector<LookupEntry> small_lookup = {
    {"key1", 10},
    {"key2", 20},
    {"key3", 30},
};
// Or std::array if size is compile-time constant:
// std::array<LookupEntry, 3> small_lookup = {{ ... }};
```
Linear search on small datasets (1-16 elements) is often faster than hashing/tree overhead, but this depends on lookup frequency and access patterns. For frequent lookups in hot code paths, the O(1) vs O(n) complexity difference may still matter even for small datasets. `std::vector` with simple structs is usually fine—it's the heavy containers (`map`, `set`, `unordered_map`) that should be avoided for small datasets unless profiling shows otherwise.
5. **Detection:** Look for these patterns in compiler output:
- Large code sections with STL symbols (vector, map, set)
- `alloc`, `realloc`, `dealloc` in symbol names
- `_M_realloc_insert`, `_M_default_append` (vector reallocation)
- Red-black tree code (`rb_tree`, `_Rb_tree`)
- Hash table infrastructure (`unordered_map`, `hash`)
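A hedged way to scan for these symbols using generic binutils (not a project script; the ELF path is a placeholder):
```bash
# Demangle and size-sort symbols, then surface STL reallocation/tree machinery.
nm -C --size-sort path/to/firmware.elf \
  | grep -E '_M_realloc_insert|_M_default_append|_Rb_tree|hashtable'
```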
**When to optimize:**
- Core components (API, network, logger)
- Widely-used components (mdns, wifi, ble)
- Components causing flash size complaints
**When not to optimize:**
- Single-use niche components
- Code where readability matters more than bytes
- Already using appropriate containers
* **State Management:** Use `CORE.data` for component state that needs to persist during configuration generation. Avoid module-level mutable globals.
**Bad Pattern (Module-Level Globals):**
```python
# Don't do this - state persists between compilation runs
_component_state = []
_use_feature = None
def enable_feature():
    global _use_feature
    _use_feature = True
```
**Good Pattern (CORE.data with Helpers):**
```python
from esphome.core import CORE
# Keys for CORE.data storage
COMPONENT_STATE_KEY = "my_component_state"
USE_FEATURE_KEY = "my_component_use_feature"
def _get_component_state() -> list:
    """Get component state from CORE.data."""
    return CORE.data.setdefault(COMPONENT_STATE_KEY, [])

def _get_use_feature() -> bool | None:
    """Get feature flag from CORE.data."""
    return CORE.data.get(USE_FEATURE_KEY)

def _set_use_feature(value: bool) -> None:
    """Set feature flag in CORE.data."""
    CORE.data[USE_FEATURE_KEY] = value

def enable_feature():
    _set_use_feature(True)
```
**Why this matters:**
- Module-level globals persist between compilation runs if the dashboard doesn't fork/exec
- `CORE.data` automatically clears between runs
- Typed helper functions provide better IDE support and maintainability
- Encapsulation makes state management explicit and testable
* **Security:** Be mindful of security when making changes to the API, web server, or any other network-related code. Do not hardcode secrets or keys.

View File

@@ -1 +1 @@
d7693a1e996cacd4a3d1c9a16336799c2a8cc3db02e4e74084151ce964581248
6af8b429b94191fe8e239fcb3b73f7982d0266cb5b05ffbc81edaeac1bc8c273

View File

@@ -1,5 +1,4 @@
[run]
omit =
esphome/components/*
esphome/analyze_memory/*
tests/integration/*

View File

@@ -47,7 +47,7 @@ runs:
- name: Build and push to ghcr by digest
id: build-ghcr
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v6.18.0
env:
DOCKER_BUILD_SUMMARY: false
DOCKER_BUILD_RECORD_UPLOAD: false
@@ -73,7 +73,7 @@ runs:
- name: Build and push to dockerhub by digest
id: build-dockerhub
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: docker/build-push-action@v6.18.0
env:
DOCKER_BUILD_SUMMARY: false
DOCKER_BUILD_RECORD_UPLOAD: false

View File

@@ -17,12 +17,12 @@ runs:
steps:
- name: Set up Python ${{ inputs.python-version }}
id: python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ inputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: venv
# yamllint disable-line rule:line-length

View File

@@ -22,17 +22,17 @@ jobs:
if: github.event.action != 'labeled' || github.event.sender.type != 'Bot'
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2
uses: actions/create-github-app-token@v2
with:
app-id: ${{ secrets.ESPHOME_GITHUB_APP_ID }}
private-key: ${{ secrets.ESPHOME_GITHUB_APP_PRIVATE_KEY }}
- name: Auto Label PR
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
github-token: ${{ steps.generate-token.outputs.token }}
script: |
@@ -105,9 +105,7 @@ jobs:
// Calculate data from PR files
const changedFiles = prFiles.map(file => file.filename);
const totalAdditions = prFiles.reduce((sum, file) => sum + (file.additions || 0), 0);
const totalDeletions = prFiles.reduce((sum, file) => sum + (file.deletions || 0), 0);
const totalChanges = totalAdditions + totalDeletions;
const totalChanges = prFiles.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
console.log('Current labels:', currentLabels.join(', '));
console.log('Changed files:', changedFiles.length);
@@ -233,21 +231,16 @@ jobs:
// Strategy: PR size detection
async function detectPRSize() {
const labels = new Set();
const testChanges = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
const nonTestChanges = totalChanges - testChanges;
if (totalChanges <= SMALL_PR_THRESHOLD) {
labels.add('small-pr');
return labels;
}
const testAdditions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
// Don't add too-big if mega-pr label is already present
if (nonTestChanges > TOO_BIG_THRESHOLD && !isMegaPR) {
labels.add('too-big');
@@ -382,7 +375,7 @@ jobs:
const labels = new Set();
// Check for missing tests
if ((allLabels.has('new-component') || allLabels.has('new-platform') || allLabels.has('new-feature')) && !allLabels.has('has-tests')) {
if ((allLabels.has('new-component') || allLabels.has('new-platform')) && !allLabels.has('has-tests')) {
labels.add('needs-tests');
}
@@ -419,13 +412,10 @@ jobs:
// Too big message
if (finalLabels.includes('too-big')) {
const testAdditions = prFiles
const testChanges = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.additions || 0), 0);
const testDeletions = prFiles
.filter(file => file.filename.startsWith('tests/'))
.reduce((sum, file) => sum + (file.deletions || 0), 0);
const nonTestChanges = (totalAdditions - testAdditions) - (totalDeletions - testDeletions);
.reduce((sum, file) => sum + (file.additions || 0) + (file.deletions || 0), 0);
const nonTestChanges = totalChanges - testChanges;
const tooManyLabels = finalLabels.length > MAX_LABELS;
const tooManyChanges = nonTestChanges > TOO_BIG_THRESHOLD;

View File

@@ -21,9 +21,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: "3.11"
@@ -47,7 +47,7 @@ jobs:
fi
- if: failure()
name: Review PR
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
script: |
await github.rest.pulls.createReview({
@@ -62,7 +62,7 @@ jobs:
run: git diff
- if: failure()
name: Archive artifacts
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4.6.2
with:
name: generated-proto-files
path: |
@@ -70,7 +70,7 @@ jobs:
esphome/components/api/api_pb2_service.*
- if: success()
name: Dismiss review
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
script: |
let reviews = await github.rest.pulls.listReviews({

View File

@@ -6,7 +6,6 @@ on:
- ".clang-tidy"
- "platformio.ini"
- "requirements_dev.txt"
- "sdkconfig.defaults"
- ".clang-tidy.hash"
- "script/clang_tidy_hash.py"
- ".github/workflows/ci-clang-tidy-hash.yml"
@@ -21,10 +20,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: "3.11"
@@ -42,7 +41,7 @@ jobs:
- if: failure()
name: Request changes
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
script: |
await github.rest.pulls.createReview({
@@ -55,7 +54,7 @@ jobs:
- if: success()
name: Dismiss review
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
script: |
let reviews = await github.rest.pulls.listReviews({

View File

@@ -43,13 +43,13 @@ jobs:
- "docker"
# - "lint"
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v5.0.0
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: "3.11"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@v3.11.1
- name: Set TAG
run: |

View File

@@ -36,18 +36,18 @@ jobs:
cache-key: ${{ steps.cache-key.outputs.key }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Generate cache-key
id: cache-key
run: echo key="${{ hashFiles('requirements.txt', 'requirements_test.txt', '.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@v4.2.4
with:
path: venv
# yamllint disable-line rule:line-length
@@ -70,7 +70,7 @@ jobs:
if: needs.determine-jobs.outputs.python-linters == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -91,7 +91,7 @@ jobs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
@@ -105,7 +105,6 @@ jobs:
script/ci-custom.py
script/build_codeowners.py --check
script/build_language_schema.py --check
script/generate-esp32-boards.py --check
pytest:
name: Run pytest
@@ -114,7 +113,8 @@ jobs:
matrix:
python-version:
- "3.11"
- "3.14"
- "3.12"
- "3.13"
os:
- ubuntu-latest
- macOS-latest
@@ -123,16 +123,20 @@ jobs:
# Minimize CI resource usage
# by only running the Python version
# version used for docker images on Windows and macOS
- python-version: "3.14"
- python-version: "3.13"
os: windows-latest
- python-version: "3.14"
- python-version: "3.12"
os: windows-latest
- python-version: "3.13"
os: macOS-latest
- python-version: "3.12"
os: macOS-latest
runs-on: ${{ matrix.os }}
needs:
- common
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Restore Python
id: restore-python
uses: ./.github/actions/restore-python
@@ -152,12 +156,12 @@ jobs:
. venv/bin/activate
pytest -vv --cov-report=xml --tb=native -n auto tests --ignore=tests/integration/
- name: Upload coverage to Codecov
uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
uses: codecov/codecov-action@v5.4.3
with:
token: ${{ secrets.CODECOV_TOKEN }}
- name: Save Python virtual environment cache
if: github.ref == 'refs/heads/dev'
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/save@v4.2.4
with:
path: venv
key: ${{ runner.os }}-${{ steps.restore-python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -172,13 +176,10 @@ jobs:
clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
python-linters: ${{ steps.determine.outputs.python-linters }}
changed-components: ${{ steps.determine.outputs.changed-components }}
changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
directly-changed-components-with-tests: ${{ steps.determine.outputs.directly-changed-components-with-tests }}
component-test-count: ${{ steps.determine.outputs.component-test-count }}
memory_impact: ${{ steps.determine.outputs.memory-impact }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
with:
# Fetch enough history to find the merge base
fetch-depth: 2
@@ -202,10 +203,7 @@ jobs:
echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
echo "directly-changed-components-with-tests=$(echo "$output" | jq -c '.directly_changed_components_with_tests')" >> $GITHUB_OUTPUT
echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT
integration-tests:
name: Run integration tests
@@ -216,15 +214,15 @@ jobs:
if: needs.determine-jobs.outputs.integration-tests == 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Set up Python 3.13
id: python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: "3.13"
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@v4.2.4
with:
path: venv
key: ${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ needs.common.outputs.cache-key }}
@@ -289,7 +287,7 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
with:
# Need history for HEAD~1 to work for checking changed files
fetch-depth: 2
@@ -302,14 +300,14 @@ jobs:
- name: Cache platformio
if: github.ref == 'refs/heads/dev'
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache@v4.2.4
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
- name: Cache platformio
if: github.ref != 'refs/heads/dev'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
uses: actions/cache/restore@v4.2.4
with:
path: ~/.platformio
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
@@ -357,458 +355,122 @@ jobs:
# yamllint disable-line rule:line-length
if: always()
test-build-components-splitter:
name: Split components for intelligent grouping (40 weighted per batch)
test-build-components:
name: Component test ${{ matrix.file }}
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0
outputs:
matrix: ${{ steps.split.outputs.components }}
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0 && fromJSON(needs.determine-jobs.outputs.component-test-count) < 100
strategy:
fail-fast: false
max-parallel: 2
matrix:
file: ${{ fromJson(needs.determine-jobs.outputs.changed-components) }}
steps:
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install libsdl2-dev
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Split components intelligently based on bus configurations
id: split
- name: test_build_components -e config -c ${{ matrix.file }}
run: |
. venv/bin/activate
./script/test_build_components -e config -c ${{ matrix.file }}
- name: test_build_components -e compile -c ${{ matrix.file }}
run: |
. venv/bin/activate
./script/test_build_components -e compile -c ${{ matrix.file }}
# Use intelligent splitter that groups components with same bus configs
components='${{ needs.determine-jobs.outputs.changed-components-with-tests }}'
# Only isolate directly changed components when targeting dev branch
# For beta/release branches, group everything for faster CI
if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
directly_changed='[]'
echo "Target branch: ${{ github.base_ref }} - grouping all components"
else
directly_changed='${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}'
echo "Target branch: ${{ github.base_ref }} - isolating directly changed components"
fi
echo "Splitting components intelligently..."
output=$(python3 script/split_components_for_ci.py --components "$components" --directly-changed "$directly_changed" --batch-size 40 --output github)
echo "$output" >> $GITHUB_OUTPUT
test-build-components-splitter:
name: Split components for testing into 20 groups maximum
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
outputs:
matrix: ${{ steps.split.outputs.components }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v5.0.0
- name: Split components into 20 groups
id: split
run: |
components=$(echo '${{ needs.determine-jobs.outputs.changed-components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]')
echo "components=$components" >> $GITHUB_OUTPUT
test-build-components-split:
name: Test components batch (${{ matrix.components }})
name: Test split components
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
- test-build-components-splitter
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) > 0
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
strategy:
fail-fast: false
max-parallel: ${{ (startsWith(github.base_ref, 'beta') || startsWith(github.base_ref, 'release')) && 8 || 4 }}
max-parallel: 4
matrix:
components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
steps:
- name: Show disk space
run: |
echo "Available disk space:"
df -h
- name: List components
run: echo ${{ matrix.components }}
- name: Cache apt packages
uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
with:
packages: libsdl2-dev
version: 1.0
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install libsdl2-dev
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Validate and compile components with intelligent grouping
- name: Validate config
run: |
. venv/bin/activate
# Check if /mnt has more free space than / before bind mounting
# Extract available space in KB for comparison
root_avail=$(df -k / | awk 'NR==2 {print $4}')
mnt_avail=$(df -k /mnt 2>/dev/null | awk 'NR==2 {print $4}')
echo "Available space: / has ${root_avail}KB, /mnt has ${mnt_avail}KB"
# Only use /mnt if it has more space than /
if [ -n "$mnt_avail" ] && [ "$mnt_avail" -gt "$root_avail" ]; then
echo "Using /mnt for build files (more space available)"
# Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
sudo mkdir -p /mnt/platformio
sudo chown $USER:$USER /mnt/platformio
mkdir -p ~/.platformio
sudo mount --bind /mnt/platformio ~/.platformio
# Bind mount test build directory to /mnt
sudo mkdir -p /mnt/test_build_components_build
sudo chown $USER:$USER /mnt/test_build_components_build
mkdir -p tests/test_build_components/build
sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
else
echo "Using / for build files (more space available than /mnt or /mnt unavailable)"
fi
# Convert space-separated components to comma-separated for Python script
components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')
# Only isolate directly changed components when targeting dev branch
# For beta/release branches, group everything for faster CI
#
# WHY ISOLATE DIRECTLY CHANGED COMPONENTS?
# - Isolated tests run WITHOUT --testing-mode, enabling full validation
# - This catches pin conflicts and other issues in directly changed code
# - Grouped tests use --testing-mode to allow config merging (disables some checks)
# - Dependencies are safe to group since they weren't modified in this PR
if [[ "${{ github.base_ref }}" == beta* ]] || [[ "${{ github.base_ref }}" == release* ]]; then
directly_changed_csv=""
echo "Testing components: $components_csv"
echo "Target branch: ${{ github.base_ref }} - grouping all components"
else
directly_changed_csv=$(echo '${{ needs.determine-jobs.outputs.directly-changed-components-with-tests }}' | jq -r 'join(",")')
echo "Testing components: $components_csv"
echo "Target branch: ${{ github.base_ref }} - isolating directly changed components: $directly_changed_csv"
fi
echo ""
# Show disk space before validation (after bind mounts setup)
echo "Disk space before config validation:"
df -h
echo ""
# Run config validation with grouping and isolation
python3 script/test_build_components.py -e config -c "$components_csv" -f --isolate "$directly_changed_csv"
echo ""
echo "Config validation passed! Starting compilation..."
echo ""
# Show disk space before compilation
echo "Disk space before compilation:"
df -h
echo ""
# Run compilation with grouping and isolation
python3 script/test_build_components.py -e compile -c "$components_csv" -f --isolate "$directly_changed_csv"
for component in ${{ matrix.components }}; do
./script/test_build_components -e config -c $component
done
- name: Compile config
run: |
. venv/bin/activate
mkdir build_cache
export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
for component in ${{ matrix.components }}; do
./script/test_build_components -e compile -c $component
done
pre-commit-ci-lite:
name: pre-commit.ci lite
runs-on: ubuntu-latest
needs:
- common
if: github.event_name == 'pull_request' && !startsWith(github.base_ref, 'beta') && !startsWith(github.base_ref, 'release')
if: github.event_name == 'pull_request' && github.base_ref != 'beta' && github.base_ref != 'release'
steps:
- name: Check out code from GitHub
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- uses: esphome/action@43cd1109c09c544d97196f7730ee5b2e0cc6d81e # v3.0.1 fork with pinned actions/cache
- uses: pre-commit/action@v3.0.1
env:
SKIP: pylint,clang-tidy-hash
- uses: pre-commit-ci/lite-action@5d6cc0eb514c891a40562a58a8e71576c5c7fb43 # v1.1.0
- uses: pre-commit-ci/lite-action@v1.1.0
if: always()
memory-impact-target-branch:
name: Build target branch for memory impact
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true'
outputs:
ram_usage: ${{ steps.extract.outputs.ram_usage }}
flash_usage: ${{ steps.extract.outputs.flash_usage }}
cache_hit: ${{ steps.cache-memory-analysis.outputs.cache-hit }}
skip: ${{ steps.check-script.outputs.skip }}
steps:
- name: Check out target branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.base_ref }}
# Check if memory impact extraction script exists on target branch
# If not, skip the analysis (this handles older branches that don't have the feature)
- name: Check for memory impact script
id: check-script
run: |
if [ -f "script/ci_memory_impact_extract.py" ]; then
echo "skip=false" >> $GITHUB_OUTPUT
else
echo "skip=true" >> $GITHUB_OUTPUT
echo "::warning::ci_memory_impact_extract.py not found on target branch, skipping memory impact analysis"
fi
# All remaining steps only run if script exists
- name: Generate cache key
id: cache-key
if: steps.check-script.outputs.skip != 'true'
run: |
# Get the commit SHA of the target branch
target_sha=$(git rev-parse HEAD)
# Hash the build infrastructure files (all files that affect build/analysis)
infra_hash=$(cat \
script/test_build_components.py \
script/ci_memory_impact_extract.py \
script/analyze_component_buses.py \
script/merge_component_configs.py \
script/ci_helpers.py \
.github/workflows/ci.yml \
| sha256sum | cut -d' ' -f1)
# Get platform and components from job inputs
platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"
components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
components_hash=$(echo "$components" | sha256sum | cut -d' ' -f1)
# Combine into cache key
cache_key="memory-analysis-target-${target_sha}-${infra_hash}-${platform}-${components_hash}"
echo "cache-key=${cache_key}" >> $GITHUB_OUTPUT
echo "Cache key: ${cache_key}"
- name: Restore cached memory analysis
id: cache-memory-analysis
if: steps.check-script.outputs.skip != 'true'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}
- name: Cache status
if: steps.check-script.outputs.skip != 'true'
run: |
if [ "${{ steps.cache-memory-analysis.outputs.cache-hit }}" == "true" ]; then
echo "✓ Cache hit! Using cached memory analysis results."
echo " Skipping build step to save time."
else
echo "✗ Cache miss. Will build and analyze memory usage."
fi
- name: Restore Python
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
- name: Build, compile, and analyze memory
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true'
id: build
run: |
. venv/bin/activate
components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"
echo "Building with test_build_components.py for $platform with components:"
echo "$components" | jq -r '.[]' | sed 's/^/ - /'
# Use test_build_components.py which handles grouping automatically
# Pass components as comma-separated list
component_list=$(echo "$components" | jq -r 'join(",")')
echo "Compiling with test_build_components.py..."
# Run build and extract memory with auto-detection of build directory for detailed analysis
# Use tee to show output in CI while also piping to extraction script
python script/test_build_components.py \
-e compile \
-c "$component_list" \
-t "$platform" 2>&1 | \
tee /dev/stderr | \
python script/ci_memory_impact_extract.py \
--output-env \
--output-json memory-analysis-target.json
- name: Save memory analysis to cache
if: steps.check-script.outputs.skip != 'true' && steps.cache-memory-analysis.outputs.cache-hit != 'true' && steps.build.outcome == 'success'
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: memory-analysis-target.json
key: ${{ steps.cache-key.outputs.cache-key }}
- name: Extract memory usage for outputs
id: extract
if: steps.check-script.outputs.skip != 'true'
run: |
if [ -f memory-analysis-target.json ]; then
ram=$(jq -r '.ram_bytes' memory-analysis-target.json)
flash=$(jq -r '.flash_bytes' memory-analysis-target.json)
echo "ram_usage=${ram}" >> $GITHUB_OUTPUT
echo "flash_usage=${flash}" >> $GITHUB_OUTPUT
echo "RAM: ${ram} bytes, Flash: ${flash} bytes"
else
echo "Error: memory-analysis-target.json not found"
exit 1
fi
- name: Upload memory analysis JSON
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: memory-analysis-target
path: memory-analysis-target.json
if-no-files-found: warn
retention-days: 1
memory-impact-pr-branch:
name: Build PR branch for memory impact
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true'
outputs:
ram_usage: ${{ steps.extract.outputs.ram_usage }}
flash_usage: ${{ steps.extract.outputs.flash_usage }}
steps:
- name: Check out PR branch
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Cache platformio
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: ~/.platformio
key: platformio-memory-${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}-${{ hashFiles('platformio.ini') }}
- name: Build, compile, and analyze memory
id: extract
run: |
. venv/bin/activate
components='${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}'
platform="${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}"
echo "Building with test_build_components.py for $platform with components:"
echo "$components" | jq -r '.[]' | sed 's/^/ - /'
# Use test_build_components.py which handles grouping automatically
# Pass components as comma-separated list
component_list=$(echo "$components" | jq -r 'join(",")')
echo "Compiling with test_build_components.py..."
# Run build and extract memory with auto-detection of build directory for detailed analysis
# Use tee to show output in CI while also piping to extraction script
python script/test_build_components.py \
-e compile \
-c "$component_list" \
-t "$platform" 2>&1 | \
tee /dev/stderr | \
python script/ci_memory_impact_extract.py \
--output-env \
--output-json memory-analysis-pr.json
- name: Upload memory analysis JSON
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: memory-analysis-pr
path: memory-analysis-pr.json
if-no-files-found: warn
retention-days: 1
memory-impact-comment:
name: Comment memory impact
runs-on: ubuntu-24.04
needs:
- common
- determine-jobs
- memory-impact-target-branch
- memory-impact-pr-branch
if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.memory_impact).should_run == 'true' && needs.memory-impact-target-branch.outputs.skip != 'true'
permissions:
contents: read
pull-requests: write
steps:
- name: Check out code
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: Restore Python
uses: ./.github/actions/restore-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
cache-key: ${{ needs.common.outputs.cache-key }}
- name: Download target analysis JSON
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: memory-analysis-target
path: ./memory-analysis
continue-on-error: true
- name: Download PR analysis JSON
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4.3.0
with:
name: memory-analysis-pr
path: ./memory-analysis
continue-on-error: true
- name: Post or update PR comment
env:
GH_TOKEN: ${{ github.token }}
COMPONENTS: ${{ toJSON(fromJSON(needs.determine-jobs.outputs.memory_impact).components) }}
PLATFORM: ${{ fromJSON(needs.determine-jobs.outputs.memory_impact).platform }}
TARGET_RAM: ${{ needs.memory-impact-target-branch.outputs.ram_usage }}
TARGET_FLASH: ${{ needs.memory-impact-target-branch.outputs.flash_usage }}
PR_RAM: ${{ needs.memory-impact-pr-branch.outputs.ram_usage }}
PR_FLASH: ${{ needs.memory-impact-pr-branch.outputs.flash_usage }}
TARGET_CACHE_HIT: ${{ needs.memory-impact-target-branch.outputs.cache_hit }}
run: |
. venv/bin/activate
# Check if analysis JSON files exist
target_json_arg=""
pr_json_arg=""
if [ -f ./memory-analysis/memory-analysis-target.json ]; then
echo "Found target analysis JSON"
target_json_arg="--target-json ./memory-analysis/memory-analysis-target.json"
else
echo "No target analysis JSON found"
fi
if [ -f ./memory-analysis/memory-analysis-pr.json ]; then
echo "Found PR analysis JSON"
pr_json_arg="--pr-json ./memory-analysis/memory-analysis-pr.json"
else
echo "No PR analysis JSON found"
fi
# Add cache flag if target was cached
cache_flag=""
if [ "$TARGET_CACHE_HIT" == "true" ]; then
cache_flag="--target-cache-hit"
fi
python script/ci_memory_impact_comment.py \
--pr-number "${{ github.event.pull_request.number }}" \
--components "$COMPONENTS" \
--platform "$PLATFORM" \
--target-ram "$TARGET_RAM" \
--target-flash "$TARGET_FLASH" \
--pr-ram "$PR_RAM" \
--pr-flash "$PR_FLASH" \
$target_json_arg \
$pr_json_arg \
$cache_flag
ci-status:
name: CI Status
runs-on: ubuntu-24.04
@@ -820,12 +482,10 @@ jobs:
- integration-tests
- clang-tidy
- determine-jobs
- test-build-components
- test-build-components-splitter
- test-build-components-split
- pre-commit-ci-lite
- memory-impact-target-branch
- memory-impact-pr-branch
- memory-impact-comment
if: always()
steps:
- name: Success

View File

@@ -25,7 +25,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Request reviews from component codeowners
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
script: |
const owner = context.repo.owner;

View File

@@ -54,11 +54,11 @@ jobs:
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
@@ -86,6 +86,6 @@ jobs:
exit 1
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
uses: github/codeql-action/analyze@v3
with:
category: "/language:${{matrix.language}}"

View File

@@ -15,7 +15,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Add external component comment
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |

View File

@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Notify codeowners for component issues
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
script: |
const owner = context.repo.owner;

.github/workflows/needs-docs.yml — new file (24 lines)
View File

@@ -0,0 +1,24 @@
name: Needs Docs
on:
pull_request:
types: [labeled, unlabeled]
jobs:
check:
name: Check
runs-on: ubuntu-latest
steps:
- name: Check for needs-docs label
uses: actions/github-script@v7.0.1
with:
script: |
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number
});
const needsDocs = labels.find(label => label.name === 'needs-docs');
if (needsDocs) {
core.setFailed('Pull request needs docs');
}

View File

@@ -20,7 +20,7 @@ jobs:
branch_build: ${{ steps.tag.outputs.branch_build }}
deploy_env: ${{ steps.tag.outputs.deploy_env }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v5.0.0
- name: Get tag
id: tag
# yamllint disable rule:line-length
@@ -60,9 +60,9 @@ jobs:
contents: read
id-token: write
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v5.0.0
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: "3.x"
- name: Build
@@ -70,7 +70,7 @@ jobs:
pip3 install build
python3 -m build
- name: Publish
uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0
uses: pypa/gh-action-pypi-publish@v1.12.4
with:
skip-existing: true
@@ -92,22 +92,22 @@ jobs:
os: "ubuntu-24.04-arm"
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v5.0.0
- name: Set up Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: "3.11"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@v3.11.1
- name: Log in to docker hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@v3.5.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@v3.5.0
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -138,7 +138,7 @@ jobs:
# version: ${{ needs.init.outputs.tag }}
- name: Upload digests
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
uses: actions/upload-artifact@v4.6.2
with:
name: digests-${{ matrix.platform.arch }}
path: /tmp/digests
@@ -168,27 +168,27 @@ jobs:
- ghcr
- dockerhub
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- uses: actions/checkout@v5.0.0
- name: Download digests
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
uses: actions/download-artifact@v5.0.0
with:
pattern: digests-*
path: /tmp/digests
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
uses: docker/setup-buildx-action@v3.11.1
- name: Log in to docker hub
if: matrix.registry == 'dockerhub'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@v3.5.0
with:
username: ${{ secrets.DOCKER_USER }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log in to the GitHub container registry
if: matrix.registry == 'ghcr'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@v3.5.0
with:
registry: ghcr.io
username: ${{ github.actor }}
@@ -220,7 +220,7 @@ jobs:
- deploy-manifest
steps:
- name: Trigger Workflow
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
github-token: ${{ secrets.DEPLOY_HA_ADDON_REPO_TOKEN }}
script: |
@@ -246,7 +246,7 @@ jobs:
environment: ${{ needs.init.outputs.deploy_env }}
steps:
- name: Trigger Workflow
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
uses: actions/github-script@v7.0.1
with:
github-token: ${{ secrets.DEPLOY_ESPHOME_SCHEMA_REPO_TOKEN }}
script: |

View File

@@ -15,52 +15,36 @@ concurrency:
jobs:
stale:
if: github.repository_owner == 'esphome'
runs-on: ubuntu-latest
steps:
- name: Stale
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
- uses: actions/stale@v9.1.0
with:
debug-only: ${{ github.ref != 'refs/heads/dev' }} # Dry-run when not run on dev branch
remove-stale-when-updated: true
operations-per-run: 400
# The 90 day stale policy for PRs
# - PRs
# - No PRs marked as "not-stale"
# - No Issues (see below)
days-before-pr-stale: 90
days-before-pr-close: 7
days-before-issue-stale: -1
days-before-issue-close: -1
remove-stale-when-updated: true
stale-pr-label: "stale"
exempt-pr-labels: "not-stale"
stale-pr-message: >
There hasn't been any activity on this pull request recently. This
pull request has been automatically marked as stale because of that
and will be closed if no further activity occurs within 7 days.
Thank you for your contributions.
If you are the author of this PR, please leave a comment if you want
to keep it open. Also, please rebase your PR onto the latest dev
branch to ensure that it's up to date with the latest changes.
Thank you for your contribution!
# The 90 day stale policy for Issues
# - Issues
# - No Issues marked as "not-stale"
# - No PRs (see above)
days-before-issue-stale: 90
days-before-issue-close: 7
# Use stale to automatically close issues with a
# reference to the issue tracker
close-issues:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9.1.0
with:
days-before-pr-stale: -1
days-before-pr-close: -1
days-before-issue-stale: 1
days-before-issue-close: 1
remove-stale-when-updated: true
stale-issue-label: "stale"
exempt-issue-labels: "not-stale"
stale-issue-message: >
There hasn't been any activity on this issue recently. Due to the
high number of incoming GitHub notifications, we have to clean some
of the old issues, as many of them have already been resolved with
the latest updates.
Please make sure to update to the latest ESPHome version and
check if that solves the issue. Let us know if that works for you by
adding a comment 👍
This issue has now been marked as stale and will be closed if no
further activity occurs. Thank you for your contributions.
https://github.com/esphome/esphome/issues/430

View File

@@ -1,30 +0,0 @@
name: Status check labels
on:
pull_request:
types: [labeled, unlabeled]
jobs:
check:
name: Check ${{ matrix.label }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
label:
- needs-docs
- merge-after-release
steps:
- name: Check for ${{ matrix.label }} label
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number
});
const hasLabel = labels.find(label => label.name === '${{ matrix.label }}');
if (hasLabel) {
core.setFailed('Pull request cannot be merged, it is labeled as ${{ matrix.label }}');
}

View File

@@ -13,16 +13,16 @@ jobs:
if: github.repository == 'esphome/esphome'
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
- name: Checkout Home Assistant
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
uses: actions/checkout@v5.0.0
with:
repository: home-assistant/core
path: lib/home-assistant
- name: Setup Python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@v5.6.0
with:
python-version: 3.13
@@ -30,18 +30,13 @@ jobs:
run: |
python -m pip install --upgrade pip
pip install -e lib/home-assistant
pip install -r requirements_test.txt pre-commit
- name: Sync
run: |
python ./script/sync-device_class.py
- name: Run pre-commit hooks
run: |
python script/run-in-env.py pre-commit run --all-files
- name: Commit changes
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
uses: peter-evans/create-pull-request@v7.0.8
with:
commit-message: "Synchronise Device Classes from Home Assistant"
committer: esphomebot <esphome@openhomefoundation.org>

View File

@@ -11,7 +11,7 @@ ci:
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.1
rev: v0.12.9
hooks:
# Run the linter.
- id: ruff

View File

@@ -62,12 +62,11 @@ esphome/components/bedjet/fan/* @jhansche
esphome/components/bedjet/sensor/* @javawizard @jhansche
esphome/components/beken_spi_led_strip/* @Mat931
esphome/components/bh1750/* @OttoWinter
esphome/components/bh1900nux/* @B48D81EFCC
esphome/components/binary_sensor/* @esphome/core
esphome/components/bk72xx/* @kuba2k2
esphome/components/bl0906/* @athom-tech @jesserockz @tarontop
esphome/components/bl0939/* @ziceva
esphome/components/bl0940/* @dan-s-github @tobias-
esphome/components/bl0940/* @tobias-
esphome/components/bl0942/* @dbuezas @dwmw2
esphome/components/ble_client/* @buxtronix @clydebarrow
esphome/components/bluetooth_proxy/* @bdraco @jesserockz
@@ -89,8 +88,7 @@ esphome/components/bp1658cj/* @Cossid
esphome/components/bp5758d/* @Cossid
esphome/components/button/* @esphome/core
esphome/components/bytebuffer/* @clydebarrow
esphome/components/camera/* @bdraco @DT-art1
esphome/components/camera_encoder/* @DT-art1
esphome/components/camera/* @DT-art1 @bdraco
esphome/components/canbus/* @danielschramm @mvturnho
esphome/components/cap1188/* @mreditor97
esphome/components/captive_portal/* @esphome/core
@@ -140,16 +138,15 @@ esphome/components/ens160_base/* @latonita @vincentscode
esphome/components/ens160_i2c/* @latonita
esphome/components/ens160_spi/* @latonita
esphome/components/ens210/* @itn3rd77
esphome/components/epaper_spi/* @esphome/core
esphome/components/es7210/* @kahrendt
esphome/components/es7243e/* @kbx81
esphome/components/es8156/* @kbx81
esphome/components/es8311/* @kahrendt @kroimon
esphome/components/es8388/* @P4uLT
esphome/components/esp32/* @esphome/core
esphome/components/esp32_ble/* @bdraco @jesserockz @Rapsssito
esphome/components/esp32_ble/* @Rapsssito @bdraco @jesserockz
esphome/components/esp32_ble_client/* @bdraco @jesserockz
esphome/components/esp32_ble_server/* @clydebarrow @jesserockz @Rapsssito
esphome/components/esp32_ble_server/* @Rapsssito @clydebarrow @jesserockz
esphome/components/esp32_ble_tracker/* @bdraco
esphome/components/esp32_camera_web_server/* @ayufan
esphome/components/esp32_can/* @Sympatron
@@ -162,13 +159,14 @@ esphome/components/esp_ldo/* @clydebarrow
esphome/components/espnow/* @jesserockz
esphome/components/ethernet_info/* @gtjadsonsantos
esphome/components/event/* @nohat
esphome/components/event_emitter/* @Rapsssito
esphome/components/exposure_notifications/* @OttoWinter
esphome/components/ezo/* @ssieb
esphome/components/ezo_pmp/* @carlos-sarmiento
esphome/components/factory_reset/* @anatoly-savchenkov
esphome/components/fastled_base/* @OttoWinter
esphome/components/feedback/* @ianchi
esphome/components/fingerprint_grow/* @alexborro @loongyh @OnFreund
esphome/components/fingerprint_grow/* @OnFreund @alexborro @loongyh
esphome/components/font/* @clydebarrow @esphome/core
esphome/components/fs3000/* @kahrendt
esphome/components/ft5x06/* @clydebarrow
@@ -204,7 +202,7 @@ esphome/components/heatpumpir/* @rob-deutsch
esphome/components/hitachi_ac424/* @sourabhjaiswal
esphome/components/hm3301/* @freekode
esphome/components/hmac_md5/* @dwmw2
esphome/components/homeassistant/* @esphome/core @OttoWinter
esphome/components/homeassistant/* @OttoWinter @esphome/core
esphome/components/homeassistant/number/* @landonr
esphome/components/homeassistant/switch/* @Links2004
esphome/components/honeywell_hih_i2c/* @Benichou34
@@ -229,13 +227,13 @@ esphome/components/iaqcore/* @yozik04
esphome/components/ili9xxx/* @clydebarrow @nielsnl68
esphome/components/improv_base/* @esphome/core
esphome/components/improv_serial/* @esphome/core
esphome/components/ina226/* @latonita @Sergio303
esphome/components/ina226/* @Sergio303 @latonita
esphome/components/ina260/* @mreditor97
esphome/components/ina2xx_base/* @latonita
esphome/components/ina2xx_i2c/* @latonita
esphome/components/ina2xx_spi/* @latonita
esphome/components/inkbird_ibsth1_mini/* @fkirill
esphome/components/inkplate/* @jesserockz @JosipKuci
esphome/components/inkplate6/* @jesserockz
esphome/components/integration/* @OttoWinter
esphome/components/internal_temperature/* @Mat931
esphome/components/interval/* @esphome/core
@@ -258,7 +256,6 @@ esphome/components/libretiny_pwm/* @kuba2k2
esphome/components/light/* @esphome/core
esphome/components/lightwaverf/* @max246
esphome/components/lilygo_t5_47/touchscreen/* @jesserockz
esphome/components/lm75b/* @beormund
esphome/components/ln882x/* @lamauny
esphome/components/lock/* @esphome/core
esphome/components/logger/* @esphome/core
@@ -279,8 +276,8 @@ esphome/components/max7219digit/* @rspaargaren
esphome/components/max9611/* @mckaymatthew
esphome/components/mcp23008/* @jesserockz
esphome/components/mcp23017/* @jesserockz
esphome/components/mcp23s08/* @jesserockz @SenexCrenshaw
esphome/components/mcp23s17/* @jesserockz @SenexCrenshaw
esphome/components/mcp23s08/* @SenexCrenshaw @jesserockz
esphome/components/mcp23s17/* @SenexCrenshaw @jesserockz
esphome/components/mcp23x08_base/* @jesserockz
esphome/components/mcp23x17_base/* @jesserockz
esphome/components/mcp23xxx_base/* @jesserockz
@@ -301,7 +298,6 @@ esphome/components/mics_4514/* @jesserockz
esphome/components/midea/* @dudanov
esphome/components/midea_ir/* @dudanov
esphome/components/mipi_dsi/* @clydebarrow
esphome/components/mipi_rgb/* @clydebarrow
esphome/components/mipi_spi/* @clydebarrow
esphome/components/mitsubishi/* @RubyBailey
esphome/components/mixer/speaker/* @kahrendt
@@ -345,7 +341,7 @@ esphome/components/ota/* @esphome/core
esphome/components/output/* @esphome/core
esphome/components/packet_transport/* @clydebarrow
esphome/components/pca6416a/* @Mat931
esphome/components/pca9554/* @bdraco @clydebarrow @hwstar
esphome/components/pca9554/* @clydebarrow @hwstar
esphome/components/pcf85063/* @brogon
esphome/components/pcf8563/* @KoenBreeman
esphome/components/pi4ioe5v6408/* @jesserockz
@@ -356,9 +352,9 @@ esphome/components/pm2005/* @andrewjswan
esphome/components/pmsa003i/* @sjtrny
esphome/components/pmsx003/* @ximex
esphome/components/pmwcs3/* @SeByDocKy
esphome/components/pn532/* @jesserockz @OttoWinter
esphome/components/pn532_i2c/* @jesserockz @OttoWinter
esphome/components/pn532_spi/* @jesserockz @OttoWinter
esphome/components/pn532/* @OttoWinter @jesserockz
esphome/components/pn532_i2c/* @OttoWinter @jesserockz
esphome/components/pn532_spi/* @OttoWinter @jesserockz
esphome/components/pn7150/* @jesserockz @kbx81
esphome/components/pn7150_i2c/* @jesserockz @kbx81
esphome/components/pn7160/* @jesserockz @kbx81
@@ -367,7 +363,7 @@ esphome/components/pn7160_spi/* @jesserockz @kbx81
esphome/components/power_supply/* @esphome/core
esphome/components/preferences/* @esphome/core
esphome/components/psram/* @esphome/core
esphome/components/pulse_meter/* @cstaahl @stevebaxter @TrentHouliston
esphome/components/pulse_meter/* @TrentHouliston @cstaahl @stevebaxter
esphome/components/pvvx_mithermometer/* @pasiz
esphome/components/pylontech/* @functionpointer
esphome/components/qmp6988/* @andrewpc
@@ -408,8 +404,7 @@ esphome/components/sensirion_common/* @martgras
esphome/components/sensor/* @esphome/core
esphome/components/sfa30/* @ghsensdev
esphome/components/sgp40/* @SenexCrenshaw
esphome/components/sgp4x/* @martgras @SenexCrenshaw
esphome/components/sha256/* @esphome/core
esphome/components/sgp4x/* @SenexCrenshaw @martgras
esphome/components/shelly_dimmer/* @edge90 @rnauber
esphome/components/sht3xd/* @mrtoy-me
esphome/components/sht4x/* @sjtrny
@@ -431,7 +426,6 @@ esphome/components/speaker/media_player/* @kahrendt @synesthesiam
esphome/components/spi/* @clydebarrow @esphome/core
esphome/components/spi_device/* @clydebarrow
esphome/components/spi_led_strip/* @clydebarrow
esphome/components/split_buffer/* @jesserockz
esphome/components/sprinkler/* @kbx81
esphome/components/sps30/* @martgras
esphome/components/ssd1322_base/* @kbx81
@@ -537,7 +531,6 @@ esphome/components/wk2204_spi/* @DrCoolZic
esphome/components/wk2212_i2c/* @DrCoolZic
esphome/components/wk2212_spi/* @DrCoolZic
esphome/components/wl_134/* @hobbypunk90
esphome/components/wts01/* @alepee
esphome/components/x9c/* @EtienneMD
esphome/components/xgzp68xx/* @gcormier
esphome/components/xiaomi_hhccjcy10/* @fariouche
@@ -553,4 +546,3 @@ esphome/components/xxtea/* @clydebarrow
esphome/components/zephyr/* @tomaszduda23
esphome/components/zhlt01/* @cfeenstra1024
esphome/components/zio_ultrasonic/* @kahrendt
esphome/components/zwave_proxy/* @kbx81

View File

@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2025.11.0-dev
PROJECT_NUMBER = 2025.9.0-dev
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

View File

@@ -6,7 +6,6 @@ import getpass
import importlib
import logging
import os
from pathlib import Path
import re
import sys
import time
@@ -14,15 +13,11 @@ from typing import Protocol
import argcomplete
# Note: Do not import modules from esphome.components here, as this would
# cause them to be loaded before external components are processed, resulting
# in the built-in version being used instead of the external component one.
from esphome import const, writer, yaml_util
import esphome.codegen as cg
from esphome.config import iter_component_configs, read_config, strip_default_ids
from esphome.const import (
ALLOWED_NAME_CHARS,
CONF_API,
CONF_BAUD_RATE,
CONF_BROKER,
CONF_DEASSERT_RTS_DTR,
@@ -48,7 +43,6 @@ from esphome.const import (
SECRETS_FILES,
)
from esphome.core import CORE, EsphomeError, coroutine
from esphome.enum import StrEnum
from esphome.helpers import get_bool_env, indent, is_ip_address
from esphome.log import AnsiFore, color, setup_log
from esphome.types import ConfigType
@@ -112,34 +106,13 @@ def choose_prompt(options, purpose: str = None):
return options[opt - 1][1]
class Purpose(StrEnum):
UPLOADING = "uploading"
LOGGING = "logging"
class PortType(StrEnum):
SERIAL = "SERIAL"
NETWORK = "NETWORK"
MQTT = "MQTT"
MQTTIP = "MQTTIP"
# Magic MQTT port types that require special handling
_MQTT_PORT_TYPES = frozenset({PortType.MQTT, PortType.MQTTIP})
def _resolve_with_cache(address: str, purpose: Purpose) -> list[str]:
"""Resolve an address using cache if available, otherwise return the address itself."""
if CORE.address_cache and (cached := CORE.address_cache.get_addresses(address)):
_LOGGER.debug("Using cached addresses for %s: %s", purpose.value, cached)
return cached
return [address]
def choose_upload_log_host(
default: list[str] | str | None,
check_default: str | None,
purpose: Purpose,
show_ota: bool,
show_mqtt: bool,
show_api: bool,
purpose: str | None = None,
) -> list[str]:
# Convert to list for uniform handling
defaults = [default] if isinstance(default, str) else default or []
@@ -159,199 +132,53 @@ def choose_upload_log_host(
]
resolved.append(choose_prompt(options, purpose=purpose))
elif device == "OTA":
# ensure IP addresses are used first
if is_ip_address(CORE.address) and (
(purpose == Purpose.LOGGING and has_api())
or (purpose == Purpose.UPLOADING and has_ota())
if (show_ota and "ota" in CORE.config) or (
show_api and "api" in CORE.config
):
resolved.extend(_resolve_with_cache(CORE.address, purpose))
if purpose == Purpose.LOGGING:
if has_api() and has_mqtt_ip_lookup():
resolved.append("MQTTIP")
if has_mqtt_logging():
resolved.append("MQTT")
if has_api() and has_non_ip_address():
resolved.extend(_resolve_with_cache(CORE.address, purpose))
elif purpose == Purpose.UPLOADING:
if has_ota() and has_mqtt_ip_lookup():
resolved.append("MQTTIP")
if has_ota() and has_non_ip_address():
resolved.extend(_resolve_with_cache(CORE.address, purpose))
resolved.append(CORE.address)
elif show_mqtt and has_mqtt_logging():
resolved.append("MQTT")
else:
resolved.append(device)
if not resolved:
_LOGGER.error("All specified devices: %s could not be resolved.", defaults)
return resolved
# No devices specified, show interactive chooser
options = [
(f"{port.path} ({port.description})", port.path) for port in get_serial_ports()
]
if purpose == Purpose.LOGGING:
if has_mqtt_logging():
mqtt_config = CORE.config[CONF_MQTT]
options.append((f"MQTT ({mqtt_config[CONF_BROKER]})", "MQTT"))
if has_api():
if has_resolvable_address():
options.append((f"Over The Air ({CORE.address})", CORE.address))
if has_mqtt_ip_lookup():
options.append(("Over The Air (MQTT IP lookup)", "MQTTIP"))
elif purpose == Purpose.UPLOADING and has_ota():
if has_resolvable_address():
options.append((f"Over The Air ({CORE.address})", CORE.address))
if has_mqtt_ip_lookup():
options.append(("Over The Air (MQTT IP lookup)", "MQTTIP"))
if (show_ota and "ota" in CORE.config) or (show_api and "api" in CORE.config):
options.append((f"Over The Air ({CORE.address})", CORE.address))
if show_mqtt and has_mqtt_logging():
mqtt_config = CORE.config[CONF_MQTT]
options.append((f"MQTT ({mqtt_config[CONF_BROKER]})", "MQTT"))
if check_default is not None and check_default in [opt[1] for opt in options]:
return [check_default]
return [choose_prompt(options, purpose=purpose)]
def has_mqtt_logging() -> bool:
"""Check if MQTT logging is available."""
if CONF_MQTT not in CORE.config:
return False
mqtt_config = CORE.config[CONF_MQTT]
# enabled by default
if CONF_LOG_TOPIC not in mqtt_config:
return True
def mqtt_logging_enabled(mqtt_config):
log_topic = mqtt_config[CONF_LOG_TOPIC]
if log_topic is None:
return False
if CONF_TOPIC not in log_topic:
return False
return log_topic.get(CONF_LEVEL, None) != "NONE"
def has_mqtt() -> bool:
"""Check if MQTT is available."""
return CONF_MQTT in CORE.config
def has_mqtt_logging() -> bool:
"""Check if MQTT logging is available."""
return (mqtt_config := CORE.config.get(CONF_MQTT)) and mqtt_logging_enabled(
mqtt_config
)
def has_api() -> bool:
"""Check if API is available."""
return CONF_API in CORE.config
def has_ota() -> bool:
"""Check if OTA is available."""
return CONF_OTA in CORE.config
def has_mqtt_ip_lookup() -> bool:
"""Check if MQTT is available and IP lookup is supported."""
from esphome.components.mqtt import CONF_DISCOVER_IP
if CONF_MQTT not in CORE.config:
return False
# Default Enabled
if CONF_DISCOVER_IP not in CORE.config[CONF_MQTT]:
return True
return CORE.config[CONF_MQTT][CONF_DISCOVER_IP]
def has_mdns() -> bool:
"""Check if MDNS is available."""
return CONF_MDNS not in CORE.config or not CORE.config[CONF_MDNS][CONF_DISABLED]
def has_non_ip_address() -> bool:
"""Check if CORE.address is set and is not an IP address."""
return CORE.address is not None and not is_ip_address(CORE.address)
def has_ip_address() -> bool:
"""Check if CORE.address is a valid IP address."""
return CORE.address is not None and is_ip_address(CORE.address)
def has_resolvable_address() -> bool:
"""Check if CORE.address is resolvable (via mDNS, DNS, or is an IP address)."""
# Any address (IP, mDNS hostname, or regular DNS hostname) is resolvable
# The resolve_ip_address() function in helpers.py handles all types via AsyncResolver
return CORE.address is not None
def mqtt_get_ip(config: ConfigType, username: str, password: str, client_id: str):
from esphome import mqtt
return mqtt.get_esphome_device_ip(config, username, password, client_id)
def _resolve_network_devices(
devices: list[str], config: ConfigType, args: ArgsProtocol
) -> list[str]:
"""Resolve device list, converting MQTT magic strings to actual IP addresses.
This function filters the devices list to:
- Replace MQTT/MQTTIP magic strings with actual IP addresses via MQTT lookup
- Deduplicate addresses while preserving order
- Only resolve MQTT once even if multiple MQTT strings are present
- If MQTT resolution fails, log a warning and continue with other devices
Args:
devices: List of device identifiers (IPs, hostnames, or magic strings)
config: ESPHome configuration
args: Command-line arguments containing MQTT credentials
Returns:
List of network addresses suitable for connection attempts
"""
network_devices: list[str] = []
mqtt_resolved: bool = False
for device in devices:
port_type = get_port_type(device)
if port_type in _MQTT_PORT_TYPES:
# Only resolve MQTT once, even if multiple MQTT entries
if not mqtt_resolved:
try:
mqtt_ips = mqtt_get_ip(
config, args.username, args.password, args.client_id
)
network_devices.extend(mqtt_ips)
except EsphomeError as err:
_LOGGER.warning(
"MQTT IP discovery failed (%s), will try other devices if available",
err,
)
mqtt_resolved = True
elif device not in network_devices:
# Regular network address or IP - add if not already present
network_devices.append(device)
return network_devices
def get_port_type(port: str) -> PortType:
"""Determine the type of port/device identifier.
Returns:
PortType.SERIAL for serial ports (/dev/ttyUSB0, COM1, etc.)
PortType.MQTT for MQTT logging
PortType.MQTTIP for MQTT IP lookup
PortType.NETWORK for IP addresses, hostnames, or mDNS names
"""
def get_port_type(port: str) -> str:
if port.startswith("/") or port.startswith("COM"):
return PortType.SERIAL
return "SERIAL"
if port == "MQTT":
return PortType.MQTT
if port == "MQTTIP":
return PortType.MQTTIP
return PortType.NETWORK
return "MQTT"
return "NETWORK"
def run_miniterm(config: ConfigType, port: str, args) -> int:
@@ -396,9 +223,7 @@ def run_miniterm(config: ConfigType, port: str, args) -> int:
.replace(b"\n", b"")
.decode("utf8", "backslashreplace")
)
time_ = datetime.now()
nanoseconds = time_.microsecond // 1000
time_str = f"[{time_.hour:02}:{time_.minute:02}:{time_.second:02}.{nanoseconds:03}]"
time_str = datetime.now().time().strftime("[%H:%M:%S]")
safe_print(parser.parse_line(line, time_str))
backtrace_state = platformio_api.process_stacktrace(
@@ -466,9 +291,7 @@ def write_cpp_file() -> int:
def compile_program(args: ArgsProtocol, config: ConfigType) -> int:
from esphome import platformio_api
# NOTE: "Build path:" format is parsed by script/ci_memory_impact_extract.py
# If you change this format, update the regex in that script as well
_LOGGER.info("Compiling app... Build path: %s", CORE.build_path)
_LOGGER.info("Compiling app...")
rc = platformio_api.run_compile(config, CORE.verbose)
if rc != 0:
return rc
@@ -523,7 +346,7 @@ def upload_using_esptool(
"detect",
]
for img in flash_images:
cmd += [img.offset, str(img.path)]
cmd += [img.offset, img.path]
if os.environ.get("ESPHOME_USE_SUBPROCESS") is None:
import esptool
@@ -553,7 +376,7 @@ def upload_using_platformio(config: ConfigType, port: str):
def check_permissions(port: str):
if os.name == "posix" and get_port_type(port) == PortType.SERIAL:
if os.name == "posix" and get_port_type(port) == "SERIAL":
# Check if we can open selected serial port
if not os.access(port, os.F_OK):
raise EsphomeError(
@@ -570,29 +393,27 @@ def check_permissions(port: str):
)
def upload_program(
config: ConfigType, args: ArgsProtocol, devices: list[str]
) -> tuple[int, str | None]:
host = devices[0]
def upload_program(config: ConfigType, args: ArgsProtocol, host: str) -> int | str:
try:
module = importlib.import_module("esphome.components." + CORE.target_platform)
if getattr(module, "upload_program")(config, args, host):
return 0, host
return 0
except AttributeError:
pass
if get_port_type(host) == PortType.SERIAL:
if get_port_type(host) == "SERIAL":
check_permissions(host)
exit_code = 1
if CORE.target_platform in (PLATFORM_ESP32, PLATFORM_ESP8266):
file = getattr(args, "file", None)
exit_code = upload_using_esptool(config, host, file, args.upload_speed)
elif CORE.target_platform == PLATFORM_RP2040 or CORE.is_libretiny:
exit_code = upload_using_platformio(config, host)
# else: Unknown target platform, exit_code remains 1
return upload_using_esptool(config, host, file, args.upload_speed)
return exit_code, host if exit_code == 0 else None
if CORE.target_platform in (PLATFORM_RP2040):
return upload_using_platformio(config, host)
if CORE.is_libretiny:
return upload_using_platformio(config, host)
return 1 # Unknown target platform
ota_conf = {}
for ota_item in config.get(CONF_OTA, []):
@@ -608,46 +429,54 @@ def upload_program(
from esphome import espota2
remote_port = int(ota_conf[CONF_PORT])
password = ota_conf.get(CONF_PASSWORD)
password = ota_conf.get(CONF_PASSWORD, "")
# Check if we should use MQTT for address resolution
# This happens when no device was specified, or the current host is "MQTT"/"OTA"
devices: list[str] = args.device or []
if (
CONF_MQTT in config # pylint: disable=too-many-boolean-expressions
and (not devices or host in ("MQTT", "OTA"))
and (
((config[CONF_MDNS][CONF_DISABLED]) and not is_ip_address(CORE.address))
or get_port_type(host) == "MQTT"
)
):
from esphome import mqtt
host = mqtt.get_esphome_device_ip(
config, args.username, args.password, args.client_id
)
if getattr(args, "file", None) is not None:
binary = Path(args.file)
else:
binary = CORE.firmware_bin
return espota2.run_ota(host, remote_port, password, args.file)
# Resolve MQTT magic strings to actual IP addresses
network_devices = _resolve_network_devices(devices, config, args)
return espota2.run_ota(network_devices, remote_port, password, binary)
return espota2.run_ota(host, remote_port, password, CORE.firmware_bin)
def show_logs(config: ConfigType, args: ArgsProtocol, devices: list[str]) -> int | None:
try:
module = importlib.import_module("esphome.components." + CORE.target_platform)
if getattr(module, "show_logs")(config, args, devices):
return 0
except AttributeError:
pass
if "logger" not in config:
raise EsphomeError("Logger is not configured!")
port = devices[0]
port_type = get_port_type(port)
if port_type == PortType.SERIAL:
if get_port_type(port) == "SERIAL":
check_permissions(port)
return run_miniterm(config, port, args)
if get_port_type(port) == "NETWORK" and "api" in config:
addresses_to_use = devices
if config[CONF_MDNS][CONF_DISABLED] and CONF_MQTT in config:
from esphome import mqtt
mqtt_address = mqtt.get_esphome_device_ip(
config, args.username, args.password, args.client_id
)[0]
addresses_to_use = [mqtt_address]
# Check if we should use API for logging
# Resolve MQTT magic strings to actual IP addresses
if has_api() and (
network_devices := _resolve_network_devices(devices, config, args)
):
from esphome.components.api.client import run_logs
return run_logs(config, network_devices)
if port_type in (PortType.NETWORK, PortType.MQTT) and has_mqtt_logging():
return run_logs(config, addresses_to_use)
if get_port_type(port) == "MQTT" and "mqtt" in config:
from esphome import mqtt
return mqtt.show_logs(
@@ -668,7 +497,7 @@ def clean_mqtt(config: ConfigType, args: ArgsProtocol) -> int | None:
def command_wizard(args: ArgsProtocol) -> int | None:
from esphome import wizard
return wizard.wizard(Path(args.configuration))
return wizard.wizard(args.configuration)
def command_config(args: ArgsProtocol, config: ConfigType) -> int | None:
@@ -713,14 +542,23 @@ def command_upload(args: ArgsProtocol, config: ConfigType) -> int | None:
devices = choose_upload_log_host(
default=args.device,
check_default=None,
purpose=Purpose.UPLOADING,
show_ota=True,
show_mqtt=False,
show_api=False,
purpose="uploading",
)
exit_code, _ = upload_program(config, args, devices)
if exit_code == 0:
_LOGGER.info("Successfully uploaded program.")
else:
_LOGGER.warning("Failed to upload to %s", devices)
# Try each device until one succeeds
exit_code = 1
for device in devices:
_LOGGER.info("Uploading to %s", device)
exit_code = upload_program(config, args, device)
if exit_code == 0:
_LOGGER.info("Successfully uploaded program.")
return 0
if len(devices) > 1:
_LOGGER.warning("Failed to upload to %s", device)
return exit_code
@@ -738,7 +576,10 @@ def command_logs(args: ArgsProtocol, config: ConfigType) -> int | None:
devices = choose_upload_log_host(
default=args.device,
check_default=None,
purpose=Purpose.LOGGING,
show_ota=False,
show_mqtt=True,
show_api=True,
purpose="logging",
)
return show_logs(config, args, devices)
@@ -764,14 +605,25 @@ def command_run(args: ArgsProtocol, config: ConfigType) -> int | None:
devices = choose_upload_log_host(
default=args.device,
check_default=None,
purpose=Purpose.UPLOADING,
show_ota=True,
show_mqtt=False,
show_api=True,
purpose="uploading",
)
exit_code, successful_device = upload_program(config, args, devices)
if exit_code == 0:
_LOGGER.info("Successfully uploaded program.")
else:
_LOGGER.warning("Failed to upload to %s", devices)
# Try each device for upload until one succeeds
successful_device: str | None = None
for device in devices:
_LOGGER.info("Uploading to %s", device)
exit_code = upload_program(config, args, device)
if exit_code == 0:
_LOGGER.info("Successfully uploaded program.")
successful_device = device
break
if len(devices) > 1:
_LOGGER.warning("Failed to upload to %s", device)
if successful_device is None:
return exit_code
if args.no_logs:
@@ -781,7 +633,10 @@ def command_run(args: ArgsProtocol, config: ConfigType) -> int | None:
devices = choose_upload_log_host(
default=successful_device,
check_default=successful_device,
purpose=Purpose.LOGGING,
show_ota=False,
show_mqtt=True,
show_api=True,
purpose="logging",
)
return show_logs(config, args, devices)
@@ -790,16 +645,6 @@ def command_clean_mqtt(args: ArgsProtocol, config: ConfigType) -> int | None:
return clean_mqtt(config, args)
def command_clean_all(args: ArgsProtocol) -> int | None:
try:
writer.clean_all(args.configuration)
except OSError as err:
_LOGGER.error("Error cleaning all files: %s", err)
return 1
_LOGGER.info("Done!")
return 0
def command_mqtt_fingerprint(args: ArgsProtocol, config: ConfigType) -> int | None:
from esphome import mqtt
@@ -841,7 +686,7 @@ def command_update_all(args: ArgsProtocol) -> int | None:
safe_print(f"{half_line}{middle_text}{half_line}")
for f in files:
safe_print(f"Updating {color(AnsiFore.CYAN, str(f))}")
safe_print(f"Updating {color(AnsiFore.CYAN, f)}")
safe_print("-" * twidth)
safe_print()
if CORE.dashboard:
@@ -853,10 +698,10 @@ def command_update_all(args: ArgsProtocol) -> int | None:
"esphome", "run", f, "--no-logs", "--device", "OTA"
)
if rc == 0:
print_bar(f"[{color(AnsiFore.BOLD_GREEN, 'SUCCESS')}] {str(f)}")
print_bar(f"[{color(AnsiFore.BOLD_GREEN, 'SUCCESS')}] {f}")
success[f] = True
else:
print_bar(f"[{color(AnsiFore.BOLD_RED, 'ERROR')}] {str(f)}")
print_bar(f"[{color(AnsiFore.BOLD_RED, 'ERROR')}] {f}")
success[f] = False
safe_print()
@@ -867,9 +712,9 @@ def command_update_all(args: ArgsProtocol) -> int | None:
failed = 0
for f in files:
if success[f]:
safe_print(f" - {str(f)}: {color(AnsiFore.GREEN, 'SUCCESS')}")
safe_print(f" - {f}: {color(AnsiFore.GREEN, 'SUCCESS')}")
else:
safe_print(f" - {str(f)}: {color(AnsiFore.BOLD_RED, 'FAILED')}")
safe_print(f" - {f}: {color(AnsiFore.BOLD_RED, 'FAILED')}")
failed += 1
return failed
@@ -891,8 +736,7 @@ def command_idedata(args: ArgsProtocol, config: ConfigType) -> int:
def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
new_name = args.name
for c in new_name:
for c in args.name:
if c not in ALLOWED_NAME_CHARS:
print(
color(
@@ -903,7 +747,8 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
)
return 1
# Load existing yaml file
raw_contents = CORE.config_path.read_text(encoding="utf-8")
with open(CORE.config_path, mode="r+", encoding="utf-8") as raw_file:
raw_contents = raw_file.read()
yaml = yaml_util.load_yaml(CORE.config_path)
if CONF_ESPHOME not in yaml or CONF_NAME not in yaml[CONF_ESPHOME]:
@@ -918,7 +763,7 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
if match is None:
new_raw = re.sub(
rf"name:\s+[\"']?{old_name}[\"']?",
f'name: "{new_name}"',
f'name: "{args.name}"',
raw_contents,
)
else:
@@ -938,28 +783,29 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
new_raw = re.sub(
rf"^(\s+{match.group(1)}):\s+[\"']?{old_name}[\"']?",
f'\\1: "{new_name}"',
f'\\1: "{args.name}"',
raw_contents,
flags=re.MULTILINE,
)
new_path: Path = CORE.config_dir / (new_name + ".yaml")
new_path = os.path.join(CORE.config_dir, args.name + ".yaml")
print(
f"Updating {color(AnsiFore.CYAN, str(CORE.config_path))} to {color(AnsiFore.CYAN, str(new_path))}"
f"Updating {color(AnsiFore.CYAN, CORE.config_path)} to {color(AnsiFore.CYAN, new_path)}"
)
print()
new_path.write_text(new_raw, encoding="utf-8")
with open(new_path, mode="w", encoding="utf-8") as new_file:
new_file.write(new_raw)
rc = run_external_process("esphome", "config", str(new_path))
rc = run_external_process("esphome", "config", new_path)
if rc != 0:
print(color(AnsiFore.BOLD_RED, "Rename failed. Reverting changes."))
new_path.unlink()
os.remove(new_path)
return 1
cli_args = [
"run",
str(new_path),
new_path,
"--no-logs",
"--device",
CORE.address,
@@ -973,11 +819,11 @@ def command_rename(args: ArgsProtocol, config: ConfigType) -> int | None:
except KeyboardInterrupt:
rc = 1
if rc != 0:
new_path.unlink()
os.remove(new_path)
return 1
if CORE.config_path != new_path:
CORE.config_path.unlink()
os.remove(CORE.config_path)
print(color(AnsiFore.BOLD_GREEN, "SUCCESS"))
print()
@@ -990,7 +836,6 @@ PRE_CONFIG_ACTIONS = {
"dashboard": command_dashboard,
"vscode": command_vscode,
"update-all": command_update_all,
"clean-all": command_clean_all,
}
POST_CONFIG_ACTIONS = {
@@ -999,9 +844,9 @@ POST_CONFIG_ACTIONS = {
"upload": command_upload,
"logs": command_logs,
"run": command_run,
"clean": command_clean,
"clean-mqtt": command_clean_mqtt,
"mqtt-fingerprint": command_mqtt_fingerprint,
"clean": command_clean,
"idedata": command_idedata,
"rename": command_rename,
"discover": command_discover,
@@ -1045,24 +890,6 @@ def parse_args(argv):
help="Add a substitution",
metavar=("key", "value"),
)
options_parser.add_argument(
"--mdns-address-cache",
help="mDNS address cache mapping in format 'hostname=ip1,ip2'",
action="append",
default=[],
)
options_parser.add_argument(
"--dns-address-cache",
help="DNS address cache mapping in format 'hostname=ip1,ip2'",
action="append",
default=[],
)
options_parser.add_argument(
"--testing-mode",
help="Enable testing mode (disables validation checks for grouped component testing)",
action="store_true",
default=False,
)
parser = argparse.ArgumentParser(
description=f"ESPHome {const.__version__}", parents=[options_parser]
@@ -1220,13 +1047,6 @@ def parse_args(argv):
"configuration", help="Your YAML configuration file(s).", nargs="+"
)
parser_clean_all = subparsers.add_parser(
"clean-all", help="Clean all build and platform files."
)
parser_clean_all.add_argument(
"configuration", help="Your YAML configuration directory.", nargs="*"
)
parser_dashboard = subparsers.add_parser(
"dashboard", help="Create a simple web server for a dashboard."
)
@@ -1273,7 +1093,7 @@ def parse_args(argv):
parser_update = subparsers.add_parser("update-all")
parser_update.add_argument(
"configuration", help="Your YAML configuration file or directory.", nargs="+"
"configuration", help="Your YAML configuration file directories.", nargs="+"
)
parser_idedata = subparsers.add_parser("idedata")
@@ -1317,16 +1137,9 @@ def parse_args(argv):
def run_esphome(argv):
from esphome.address_cache import AddressCache
args = parse_args(argv)
CORE.dashboard = args.dashboard
CORE.testing_mode = args.testing_mode
# Create address cache from command-line arguments
CORE.address_cache = AddressCache.from_cli_args(
args.mdns_address_cache, args.dns_address_cache
)
# Override log level if verbose is set
if args.verbose:
args.log_level = "DEBUG"
@@ -1349,20 +1162,14 @@ def run_esphome(argv):
_LOGGER.info("ESPHome %s", const.__version__)
for conf_path in args.configuration:
conf_path = Path(conf_path)
if any(conf_path.name == x for x in SECRETS_FILES):
if any(os.path.basename(conf_path) == x for x in SECRETS_FILES):
_LOGGER.warning("Skipping secrets file %s", conf_path)
continue
CORE.config_path = conf_path
CORE.dashboard = args.dashboard
# For logs command, skip updating external components
skip_external = args.command == "logs"
config = read_config(
dict(args.substitution) if args.substitution else {},
skip_external_update=skip_external,
)
config = read_config(dict(args.substitution) if args.substitution else {})
if config is None:
return 2
CORE.config = config
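
The `__main__.py` hunks above move port handling from bare strings to a `PortType` enum used by `get_port_type`, `check_permissions`, `upload_program` and `show_logs`. A minimal sketch of that classification behaviour, using a plain `str`-backed `Enum` as a stand-in for the project's `StrEnum` helper (illustrative only, not the upstream code):

```python
from enum import Enum


class PortType(str, Enum):
    """Stand-in for the StrEnum used in the hunk above."""

    SERIAL = "SERIAL"
    NETWORK = "NETWORK"
    MQTT = "MQTT"
    MQTTIP = "MQTTIP"


def get_port_type(port: str) -> PortType:
    """Classify a device identifier the same way the hunk above does."""
    if port.startswith("/") or port.startswith("COM"):
        return PortType.SERIAL
    if port == "MQTT":
        return PortType.MQTT
    if port == "MQTTIP":
        return PortType.MQTTIP
    return PortType.NETWORK


assert get_port_type("/dev/ttyUSB0") is PortType.SERIAL
assert get_port_type("COM3") is PortType.SERIAL
assert get_port_type("MQTTIP") is PortType.MQTTIP
assert get_port_type("my-device.local") is PortType.NETWORK
```
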

View File

@@ -1,142 +0,0 @@
"""Address cache for DNS and mDNS lookups."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from collections.abc import Iterable
_LOGGER = logging.getLogger(__name__)
def normalize_hostname(hostname: str) -> str:
"""Normalize hostname for cache lookups.
Removes trailing dots and converts to lowercase.
"""
return hostname.rstrip(".").lower()
class AddressCache:
"""Cache for DNS and mDNS address lookups.
This cache stores pre-resolved addresses from command-line arguments
to avoid slow DNS/mDNS lookups during builds.
"""
def __init__(
self,
mdns_cache: dict[str, list[str]] | None = None,
dns_cache: dict[str, list[str]] | None = None,
) -> None:
"""Initialize the address cache.
Args:
mdns_cache: Pre-populated mDNS addresses (hostname -> IPs)
dns_cache: Pre-populated DNS addresses (hostname -> IPs)
"""
self.mdns_cache = mdns_cache or {}
self.dns_cache = dns_cache or {}
def _get_cached_addresses(
self, hostname: str, cache: dict[str, list[str]], cache_type: str
) -> list[str] | None:
"""Get cached addresses from a specific cache.
Args:
hostname: The hostname to look up
cache: The cache dictionary to check
cache_type: Type of cache for logging ("mDNS" or "DNS")
Returns:
List of IP addresses if found in cache, None otherwise
"""
normalized = normalize_hostname(hostname)
if addresses := cache.get(normalized):
_LOGGER.debug("Using %s cache for %s: %s", cache_type, hostname, addresses)
return addresses
return None
def get_mdns_addresses(self, hostname: str) -> list[str] | None:
"""Get cached mDNS addresses for a hostname.
Args:
hostname: The hostname to look up (should end with .local)
Returns:
List of IP addresses if found in cache, None otherwise
"""
return self._get_cached_addresses(hostname, self.mdns_cache, "mDNS")
def get_dns_addresses(self, hostname: str) -> list[str] | None:
"""Get cached DNS addresses for a hostname.
Args:
hostname: The hostname to look up
Returns:
List of IP addresses if found in cache, None otherwise
"""
return self._get_cached_addresses(hostname, self.dns_cache, "DNS")
def get_addresses(self, hostname: str) -> list[str] | None:
"""Get cached addresses for a hostname.
Checks mDNS cache for .local domains, DNS cache otherwise.
Args:
hostname: The hostname to look up
Returns:
List of IP addresses if found in cache, None otherwise
"""
normalized = normalize_hostname(hostname)
if normalized.endswith(".local"):
return self.get_mdns_addresses(hostname)
return self.get_dns_addresses(hostname)
def has_cache(self) -> bool:
"""Check if any cache entries exist."""
return bool(self.mdns_cache or self.dns_cache)
@classmethod
def from_cli_args(
cls, mdns_args: Iterable[str], dns_args: Iterable[str]
) -> AddressCache:
"""Create cache from command-line arguments.
Args:
mdns_args: List of mDNS cache entries like ['host=ip1,ip2']
dns_args: List of DNS cache entries like ['host=ip1,ip2']
Returns:
Configured AddressCache instance
"""
mdns_cache = cls._parse_cache_args(mdns_args)
dns_cache = cls._parse_cache_args(dns_args)
return cls(mdns_cache=mdns_cache, dns_cache=dns_cache)
@staticmethod
def _parse_cache_args(cache_args: Iterable[str]) -> dict[str, list[str]]:
"""Parse cache arguments into a dictionary.
Args:
cache_args: List of cache mappings like ['host1=ip1,ip2', 'host2=ip3']
Returns:
Dictionary mapping normalized hostnames to list of IP addresses
"""
cache: dict[str, list[str]] = {}
for arg in cache_args:
if "=" not in arg:
_LOGGER.warning(
"Invalid cache format: %s (expected 'hostname=ip1,ip2')", arg
)
continue
hostname, ips = arg.split("=", 1)
# Normalize hostname for consistent lookups
normalized = normalize_hostname(hostname)
cache[normalized] = [ip.strip() for ip in ips.split(",")]
return cache
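
The `address_cache.py` module diffed above is self-contained: `.local` names are looked up in the mDNS cache, everything else in the DNS cache, and hostnames are normalised to lowercase without trailing dots. A minimal usage sketch with hypothetical hostnames and IPs, assuming the module is importable as `esphome.address_cache` (as the `__main__.py` import above indicates):

```python
from esphome.address_cache import AddressCache

# Build a cache from CLI-style arguments (hypothetical hostnames/IPs).
cache = AddressCache.from_cli_args(
    mdns_args=["bedroom-sensor.local=192.168.1.42"],
    dns_args=["gateway.example.com=10.0.0.5,10.0.0.6"],
)

# Lookups are normalised: trailing dots and case do not matter.
print(cache.get_addresses("Bedroom-Sensor.local."))  # ['192.168.1.42']
print(cache.get_addresses("gateway.example.com"))    # ['10.0.0.5', '10.0.0.6']
print(cache.get_addresses("unknown.local"))          # None -> caller falls back to a live lookup
print(cache.has_cache())                              # True
```
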

View File

@@ -1,502 +0,0 @@
"""Memory usage analyzer for ESPHome compiled binaries."""
from collections import defaultdict
from dataclasses import dataclass, field
import logging
from pathlib import Path
import re
import subprocess
from typing import TYPE_CHECKING
from .const import (
CORE_SUBCATEGORY_PATTERNS,
DEMANGLED_PATTERNS,
ESPHOME_COMPONENT_PATTERN,
SECTION_TO_ATTR,
SYMBOL_PATTERNS,
)
from .helpers import (
get_component_class_patterns,
get_esphome_components,
map_section_name,
parse_symbol_line,
)
if TYPE_CHECKING:
from esphome.platformio_api import IDEData
_LOGGER = logging.getLogger(__name__)
# GCC global constructor/destructor prefix annotations
_GCC_PREFIX_ANNOTATIONS = {
"_GLOBAL__sub_I_": "global constructor for",
"_GLOBAL__sub_D_": "global destructor for",
}
# GCC optimization suffix pattern (e.g., $isra$0, $part$1, $constprop$2)
_GCC_OPTIMIZATION_SUFFIX_PATTERN = re.compile(r"(\$(?:isra|part|constprop)\$\d+)")
# C++ runtime patterns for categorization
_CPP_RUNTIME_PATTERNS = frozenset(["vtable", "typeinfo", "thunk"])
# libc printf/scanf family base names (used to detect variants like _printf_r, vfprintf, etc.)
_LIBC_PRINTF_SCANF_FAMILY = frozenset(["printf", "fprintf", "sprintf", "scanf"])
# Regex pattern for parsing readelf section headers
# Format: [ #] name type addr off size
_READELF_SECTION_PATTERN = re.compile(
r"\s*\[\s*\d+\]\s+([\.\w]+)\s+\w+\s+[\da-fA-F]+\s+[\da-fA-F]+\s+([\da-fA-F]+)"
)
# Component category prefixes
_COMPONENT_PREFIX_ESPHOME = "[esphome]"
_COMPONENT_PREFIX_EXTERNAL = "[external]"
_COMPONENT_CORE = f"{_COMPONENT_PREFIX_ESPHOME}core"
_COMPONENT_API = f"{_COMPONENT_PREFIX_ESPHOME}api"
# C++ namespace prefixes
_NAMESPACE_ESPHOME = "esphome::"
_NAMESPACE_STD = "std::"
# Type alias for symbol information: (symbol_name, size, component)
SymbolInfoType = tuple[str, int, str]
@dataclass
class MemorySection:
"""Represents a memory section with its symbols."""
name: str
symbols: list[SymbolInfoType] = field(default_factory=list)
total_size: int = 0
@dataclass
class ComponentMemory:
"""Tracks memory usage for a component."""
name: str
text_size: int = 0 # Code in flash
rodata_size: int = 0 # Read-only data in flash
data_size: int = 0 # Initialized data (flash + ram)
bss_size: int = 0 # Uninitialized data (ram only)
symbol_count: int = 0
@property
def flash_total(self) -> int:
"""Total flash usage (text + rodata + data)."""
return self.text_size + self.rodata_size + self.data_size
@property
def ram_total(self) -> int:
"""Total RAM usage (data + bss)."""
return self.data_size + self.bss_size
class MemoryAnalyzer:
"""Analyzes memory usage from ELF files."""
def __init__(
self,
elf_path: str,
objdump_path: str | None = None,
readelf_path: str | None = None,
external_components: set[str] | None = None,
idedata: "IDEData | None" = None,
) -> None:
"""Initialize memory analyzer.
Args:
elf_path: Path to ELF file to analyze
objdump_path: Path to objdump binary (auto-detected from idedata if not provided)
readelf_path: Path to readelf binary (auto-detected from idedata if not provided)
external_components: Set of external component names
idedata: Optional PlatformIO IDEData object to auto-detect toolchain paths
"""
self.elf_path = Path(elf_path)
if not self.elf_path.exists():
raise FileNotFoundError(f"ELF file not found: {elf_path}")
# Auto-detect toolchain paths from idedata if not provided
if idedata is not None and (objdump_path is None or readelf_path is None):
objdump_path = objdump_path or idedata.objdump_path
readelf_path = readelf_path or idedata.readelf_path
_LOGGER.debug("Using toolchain paths from PlatformIO idedata")
self.objdump_path = objdump_path or "objdump"
self.readelf_path = readelf_path or "readelf"
self.external_components = external_components or set()
self.sections: dict[str, MemorySection] = {}
self.components: dict[str, ComponentMemory] = defaultdict(
lambda: ComponentMemory("")
)
self._demangle_cache: dict[str, str] = {}
self._uncategorized_symbols: list[tuple[str, str, int]] = []
self._esphome_core_symbols: list[
tuple[str, str, int]
] = [] # Track core symbols
self._component_symbols: dict[str, list[tuple[str, str, int]]] = defaultdict(
list
) # Track symbols for all components
def analyze(self) -> dict[str, ComponentMemory]:
"""Analyze the ELF file and return component memory usage."""
self._parse_sections()
self._parse_symbols()
self._categorize_symbols()
return dict(self.components)
def _parse_sections(self) -> None:
"""Parse section headers from ELF file."""
result = subprocess.run(
[self.readelf_path, "-S", str(self.elf_path)],
capture_output=True,
text=True,
check=True,
)
# Parse section headers
for line in result.stdout.splitlines():
# Look for section entries
if not (match := _READELF_SECTION_PATTERN.match(line)):
continue
section_name = match.group(1)
size_hex = match.group(2)
size = int(size_hex, 16)
# Map to standard section name
mapped_section = map_section_name(section_name)
if not mapped_section:
continue
if mapped_section not in self.sections:
self.sections[mapped_section] = MemorySection(mapped_section)
self.sections[mapped_section].total_size += size
def _parse_symbols(self) -> None:
"""Parse symbols from ELF file."""
result = subprocess.run(
[self.objdump_path, "-t", str(self.elf_path)],
capture_output=True,
text=True,
check=True,
)
# Track seen addresses to avoid duplicates
seen_addresses: set[str] = set()
for line in result.stdout.splitlines():
if not (symbol_info := parse_symbol_line(line)):
continue
section, name, size, address = symbol_info
# Skip duplicate symbols at the same address (e.g., C1/C2 constructors)
if address in seen_addresses or section not in self.sections:
continue
self.sections[section].symbols.append((name, size, ""))
seen_addresses.add(address)
def _categorize_symbols(self) -> None:
"""Categorize symbols by component."""
# First, collect all unique symbol names for batch demangling
all_symbols = {
symbol_name
for section in self.sections.values()
for symbol_name, _, _ in section.symbols
}
# Batch demangle all symbols at once
self._batch_demangle_symbols(list(all_symbols))
# Now categorize with cached demangled names
for section_name, section in self.sections.items():
for symbol_name, size, _ in section.symbols:
component = self._identify_component(symbol_name)
if component not in self.components:
self.components[component] = ComponentMemory(component)
comp_mem = self.components[component]
comp_mem.symbol_count += 1
# Update the appropriate size attribute based on section
if attr_name := SECTION_TO_ATTR.get(section_name):
setattr(comp_mem, attr_name, getattr(comp_mem, attr_name) + size)
# Track uncategorized symbols
if component == "other" and size > 0:
demangled = self._demangle_symbol(symbol_name)
self._uncategorized_symbols.append((symbol_name, demangled, size))
# Track ESPHome core symbols for detailed analysis
if component == _COMPONENT_CORE and size > 0:
demangled = self._demangle_symbol(symbol_name)
self._esphome_core_symbols.append((symbol_name, demangled, size))
# Track all component symbols for detailed analysis
if size > 0:
demangled = self._demangle_symbol(symbol_name)
self._component_symbols[component].append(
(symbol_name, demangled, size)
)
def _identify_component(self, symbol_name: str) -> str:
"""Identify which component a symbol belongs to."""
# Demangle C++ names if needed
demangled = self._demangle_symbol(symbol_name)
# Check for special component classes first (before namespace pattern)
# This handles cases like esphome::ESPHomeOTAComponent which should map to ota
if _NAMESPACE_ESPHOME in demangled:
# Check for special component classes that include component name in the class
# For example: esphome::ESPHomeOTAComponent -> ota component
for component_name in get_esphome_components():
patterns = get_component_class_patterns(component_name)
if any(pattern in demangled for pattern in patterns):
return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
# Check for ESPHome component namespaces
match = ESPHOME_COMPONENT_PATTERN.search(demangled)
if match:
component_name = match.group(1)
# Strip trailing underscore if present (e.g., switch_ -> switch)
component_name = component_name.rstrip("_")
# Check if this is an actual component in the components directory
if component_name in get_esphome_components():
return f"{_COMPONENT_PREFIX_ESPHOME}{component_name}"
# Check if this is a known external component from the config
if component_name in self.external_components:
return f"{_COMPONENT_PREFIX_EXTERNAL}{component_name}"
# Everything else in esphome:: namespace is core
return _COMPONENT_CORE
# Check for esphome core namespace (no component namespace)
if _NAMESPACE_ESPHOME in demangled:
# If no component match found, it's core
return _COMPONENT_CORE
# Check against symbol patterns
for component, patterns in SYMBOL_PATTERNS.items():
if any(pattern in symbol_name for pattern in patterns):
return component
# Check against demangled patterns
for component, patterns in DEMANGLED_PATTERNS.items():
if any(pattern in demangled for pattern in patterns):
return component
# Special cases that need more complex logic
# Check if spi_flash vs spi_driver
if "spi_" in symbol_name or "SPI" in symbol_name:
return "spi_flash" if "spi_flash" in symbol_name else "spi_driver"
# libc special printf variants
if (
symbol_name.startswith("_")
and symbol_name[1:].replace("_r", "").replace("v", "").replace("s", "")
in _LIBC_PRINTF_SCANF_FAMILY
):
return "libc"
# Track uncategorized symbols for analysis
return "other"
def _batch_demangle_symbols(self, symbols: list[str]) -> None:
"""Batch demangle C++ symbol names for efficiency."""
if not symbols:
return
# Try to find the appropriate c++filt for the platform
cppfilt_cmd = "c++filt"
_LOGGER.info("Demangling %d symbols", len(symbols))
_LOGGER.debug("objdump_path = %s", self.objdump_path)
# Check if we have a toolchain-specific c++filt
if self.objdump_path and self.objdump_path != "objdump":
# Replace objdump with c++filt in the path
potential_cppfilt = self.objdump_path.replace("objdump", "c++filt")
_LOGGER.info("Checking for toolchain c++filt at: %s", potential_cppfilt)
if Path(potential_cppfilt).exists():
cppfilt_cmd = potential_cppfilt
_LOGGER.info("✓ Using toolchain c++filt: %s", cppfilt_cmd)
else:
_LOGGER.info(
"✗ Toolchain c++filt not found at %s, using system c++filt",
potential_cppfilt,
)
else:
_LOGGER.info("✗ Using system c++filt (objdump_path=%s)", self.objdump_path)
# Strip GCC optimization suffixes and prefixes before demangling
# Suffixes like $isra$0, $part$0, $constprop$0 confuse c++filt
# Prefixes like _GLOBAL__sub_I_ need to be removed and tracked
symbols_stripped: list[str] = []
symbols_prefixes: list[str] = [] # Track removed prefixes
for symbol in symbols:
# Remove GCC optimization markers
stripped = _GCC_OPTIMIZATION_SUFFIX_PATTERN.sub("", symbol)
# Handle GCC global constructor/initializer prefixes
# _GLOBAL__sub_I_<mangled> -> extract <mangled> for demangling
prefix = ""
for gcc_prefix in _GCC_PREFIX_ANNOTATIONS:
if stripped.startswith(gcc_prefix):
prefix = gcc_prefix
stripped = stripped[len(prefix) :]
break
symbols_stripped.append(stripped)
symbols_prefixes.append(prefix)
try:
# Send all symbols to c++filt at once
result = subprocess.run(
[cppfilt_cmd],
input="\n".join(symbols_stripped),
capture_output=True,
text=True,
check=False,
)
except (subprocess.SubprocessError, OSError, UnicodeDecodeError) as e:
# On error, cache originals
_LOGGER.warning("Failed to batch demangle symbols: %s", e)
for symbol in symbols:
self._demangle_cache[symbol] = symbol
return
if result.returncode != 0:
_LOGGER.warning(
"c++filt exited with code %d: %s",
result.returncode,
result.stderr[:200] if result.stderr else "(no error output)",
)
# Cache originals on failure
for symbol in symbols:
self._demangle_cache[symbol] = symbol
return
# Process demangled output
self._process_demangled_output(
symbols, symbols_stripped, symbols_prefixes, result.stdout, cppfilt_cmd
)
def _process_demangled_output(
self,
symbols: list[str],
symbols_stripped: list[str],
symbols_prefixes: list[str],
demangled_output: str,
cppfilt_cmd: str,
) -> None:
"""Process demangled symbol output and populate cache.
Args:
symbols: Original symbol names
symbols_stripped: Stripped symbol names sent to c++filt
symbols_prefixes: Removed prefixes to restore
demangled_output: Output from c++filt
cppfilt_cmd: Path to c++filt command (for logging)
"""
demangled_lines = demangled_output.strip().split("\n")
failed_count = 0
for original, stripped, prefix, demangled in zip(
symbols, symbols_stripped, symbols_prefixes, demangled_lines
):
# Add back any prefix that was removed
demangled = self._restore_symbol_prefix(prefix, stripped, demangled)
# If we stripped a suffix, add it back to the demangled name for clarity
if original != stripped and not prefix:
demangled = self._restore_symbol_suffix(original, demangled)
self._demangle_cache[original] = demangled
# Log symbols that failed to demangle (stayed the same as stripped version)
if stripped == demangled and stripped.startswith("_Z"):
failed_count += 1
if failed_count <= 5: # Only log first 5 failures
_LOGGER.warning("Failed to demangle: %s", original)
if failed_count == 0:
_LOGGER.info("Successfully demangled all %d symbols", len(symbols))
return
_LOGGER.warning(
"Failed to demangle %d/%d symbols using %s",
failed_count,
len(symbols),
cppfilt_cmd,
)
@staticmethod
def _restore_symbol_prefix(prefix: str, stripped: str, demangled: str) -> str:
"""Restore prefix that was removed before demangling.
Args:
prefix: Prefix that was removed (e.g., "_GLOBAL__sub_I_")
stripped: Stripped symbol name
demangled: Demangled symbol name
Returns:
Demangled name with prefix restored/annotated
"""
if not prefix:
return demangled
# Successfully demangled - add descriptive prefix
if demangled != stripped and (
annotation := _GCC_PREFIX_ANNOTATIONS.get(prefix)
):
return f"[{annotation}: {demangled}]"
# Failed to demangle - restore original prefix
return prefix + demangled
@staticmethod
def _restore_symbol_suffix(original: str, demangled: str) -> str:
"""Restore GCC optimization suffix that was removed before demangling.
Args:
original: Original symbol name with suffix
demangled: Demangled symbol name without suffix
Returns:
Demangled name with suffix annotation
"""
if suffix_match := _GCC_OPTIMIZATION_SUFFIX_PATTERN.search(original):
return f"{demangled} [{suffix_match.group(1)}]"
return demangled
def _demangle_symbol(self, symbol: str) -> str:
"""Get demangled C++ symbol name from cache."""
return self._demangle_cache.get(symbol, symbol)
def _categorize_esphome_core_symbol(self, demangled: str) -> str:
"""Categorize ESPHome core symbols into subcategories."""
# Special patterns that need to be checked separately
if any(pattern in demangled for pattern in _CPP_RUNTIME_PATTERNS):
return "C++ Runtime (vtables/RTTI)"
if demangled.startswith(_NAMESPACE_STD):
return "C++ STL"
# Check against patterns from const.py
for category, patterns in CORE_SUBCATEGORY_PATTERNS.items():
if any(pattern in demangled for pattern in patterns):
return category
return "Other Core"
if __name__ == "__main__":
from .cli import main
main()
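
The `MemoryAnalyzer` diffed above has a small programmatic surface: construct it with an ELF path (plus optional toolchain paths, or a PlatformIO `IDEData` object via `idedata=` for auto-detection), call `analyze()`, and read per-component totals from the returned `ComponentMemory` objects. A minimal sketch with example paths, assuming the package lives at `esphome.analyze_memory` as the CLI usage text above indicates:

```python
from esphome.analyze_memory import MemoryAnalyzer

# Example paths; in practice the toolchain binaries are auto-detected
# when a PlatformIO IDEData object is passed as `idedata=` instead.
analyzer = MemoryAnalyzer(
    elf_path=".esphome/build/my-device/firmware.elf",
    objdump_path="xtensa-esp32-elf-objdump",
    readelf_path="xtensa-esp32-elf-readelf",
)

components = analyzer.analyze()  # dict[str, ComponentMemory]
for name, mem in sorted(
    components.items(), key=lambda kv: kv[1].flash_total, reverse=True
)[:10]:
    print(f"{name:<30} flash={mem.flash_total:>8,} B  ram={mem.ram_total:>7,} B")
```
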

View File

@@ -1,6 +0,0 @@
"""Main entry point for running the memory analyzer as a module."""
from .cli import main
if __name__ == "__main__":
main()

View File

@@ -1,408 +0,0 @@
"""CLI interface for memory analysis with report generation."""
from collections import defaultdict
import sys
from . import (
_COMPONENT_API,
_COMPONENT_CORE,
_COMPONENT_PREFIX_ESPHOME,
_COMPONENT_PREFIX_EXTERNAL,
MemoryAnalyzer,
)
class MemoryAnalyzerCLI(MemoryAnalyzer):
"""Memory analyzer with CLI-specific report generation."""
# Column width constants
COL_COMPONENT: int = 29
COL_FLASH_TEXT: int = 14
COL_FLASH_DATA: int = 14
COL_RAM_DATA: int = 12
COL_RAM_BSS: int = 12
COL_TOTAL_FLASH: int = 15
COL_TOTAL_RAM: int = 12
COL_SEPARATOR: int = 3 # " | "
# Core analysis column widths
COL_CORE_SUBCATEGORY: int = 30
COL_CORE_SIZE: int = 12
COL_CORE_COUNT: int = 6
COL_CORE_PERCENT: int = 10
# Calculate table width once at class level
TABLE_WIDTH: int = (
COL_COMPONENT
+ COL_SEPARATOR
+ COL_FLASH_TEXT
+ COL_SEPARATOR
+ COL_FLASH_DATA
+ COL_SEPARATOR
+ COL_RAM_DATA
+ COL_SEPARATOR
+ COL_RAM_BSS
+ COL_SEPARATOR
+ COL_TOTAL_FLASH
+ COL_SEPARATOR
+ COL_TOTAL_RAM
)
@staticmethod
def _make_separator_line(*widths: int) -> str:
"""Create a separator line with given column widths.
Args:
widths: Column widths to create separators for
Returns:
Separator line like "----+---------+-----"
"""
return "-+-".join("-" * width for width in widths)
# Pre-computed separator lines
MAIN_TABLE_SEPARATOR: str = _make_separator_line(
COL_COMPONENT,
COL_FLASH_TEXT,
COL_FLASH_DATA,
COL_RAM_DATA,
COL_RAM_BSS,
COL_TOTAL_FLASH,
COL_TOTAL_RAM,
)
CORE_TABLE_SEPARATOR: str = _make_separator_line(
COL_CORE_SUBCATEGORY,
COL_CORE_SIZE,
COL_CORE_COUNT,
COL_CORE_PERCENT,
)
def generate_report(self, detailed: bool = False) -> str:
"""Generate a formatted memory report."""
components = sorted(
self.components.items(), key=lambda x: x[1].flash_total, reverse=True
)
# Calculate totals
total_flash = sum(c.flash_total for _, c in components)
total_ram = sum(c.ram_total for _, c in components)
# Build report
lines: list[str] = []
lines.append("=" * self.TABLE_WIDTH)
lines.append("Component Memory Analysis".center(self.TABLE_WIDTH))
lines.append("=" * self.TABLE_WIDTH)
lines.append("")
# Main table - fixed column widths
lines.append(
f"{'Component':<{self.COL_COMPONENT}} | {'Flash (text)':>{self.COL_FLASH_TEXT}} | {'Flash (data)':>{self.COL_FLASH_DATA}} | {'RAM (data)':>{self.COL_RAM_DATA}} | {'RAM (bss)':>{self.COL_RAM_BSS}} | {'Total Flash':>{self.COL_TOTAL_FLASH}} | {'Total RAM':>{self.COL_TOTAL_RAM}}"
)
lines.append(self.MAIN_TABLE_SEPARATOR)
for name, mem in components:
if mem.flash_total > 0 or mem.ram_total > 0:
flash_rodata = mem.rodata_size + mem.data_size
lines.append(
f"{name:<{self.COL_COMPONENT}} | {mem.text_size:>{self.COL_FLASH_TEXT - 2},} B | {flash_rodata:>{self.COL_FLASH_DATA - 2},} B | "
f"{mem.data_size:>{self.COL_RAM_DATA - 2},} B | {mem.bss_size:>{self.COL_RAM_BSS - 2},} B | "
f"{mem.flash_total:>{self.COL_TOTAL_FLASH - 2},} B | {mem.ram_total:>{self.COL_TOTAL_RAM - 2},} B"
)
lines.append(self.MAIN_TABLE_SEPARATOR)
lines.append(
f"{'TOTAL':<{self.COL_COMPONENT}} | {' ':>{self.COL_FLASH_TEXT}} | {' ':>{self.COL_FLASH_DATA}} | "
f"{' ':>{self.COL_RAM_DATA}} | {' ':>{self.COL_RAM_BSS}} | "
f"{total_flash:>{self.COL_TOTAL_FLASH - 2},} B | {total_ram:>{self.COL_TOTAL_RAM - 2},} B"
)
# Top consumers
lines.append("")
lines.append("Top Flash Consumers:")
for i, (name, mem) in enumerate(components[:25]):
if mem.flash_total > 0:
percentage = (
(mem.flash_total / total_flash * 100) if total_flash > 0 else 0
)
lines.append(
f"{i + 1}. {name} ({mem.flash_total:,} B) - {percentage:.1f}% of analyzed flash"
)
lines.append("")
lines.append("Top RAM Consumers:")
ram_components = sorted(components, key=lambda x: x[1].ram_total, reverse=True)
for i, (name, mem) in enumerate(ram_components[:25]):
if mem.ram_total > 0:
percentage = (mem.ram_total / total_ram * 100) if total_ram > 0 else 0
lines.append(
f"{i + 1}. {name} ({mem.ram_total:,} B) - {percentage:.1f}% of analyzed RAM"
)
lines.append("")
lines.append(
"Note: This analysis covers symbols in the ELF file. Some runtime allocations may not be included."
)
lines.append("=" * self.TABLE_WIDTH)
# Add ESPHome core detailed analysis if there are core symbols
if self._esphome_core_symbols:
lines.append("")
lines.append("=" * self.TABLE_WIDTH)
lines.append(
f"{_COMPONENT_CORE} Detailed Analysis".center(self.TABLE_WIDTH)
)
lines.append("=" * self.TABLE_WIDTH)
lines.append("")
# Group core symbols by subcategory
core_subcategories: dict[str, list[tuple[str, str, int]]] = defaultdict(
list
)
for symbol, demangled, size in self._esphome_core_symbols:
# Categorize based on demangled name patterns
subcategory = self._categorize_esphome_core_symbol(demangled)
core_subcategories[subcategory].append((symbol, demangled, size))
# Sort subcategories by total size
sorted_subcategories = sorted(
[
(name, symbols, sum(s[2] for s in symbols))
for name, symbols in core_subcategories.items()
],
key=lambda x: x[2],
reverse=True,
)
lines.append(
f"{'Subcategory':<{self.COL_CORE_SUBCATEGORY}} | {'Size':>{self.COL_CORE_SIZE}} | "
f"{'Count':>{self.COL_CORE_COUNT}} | {'% of Core':>{self.COL_CORE_PERCENT}}"
)
lines.append(self.CORE_TABLE_SEPARATOR)
core_total = sum(size for _, _, size in self._esphome_core_symbols)
for subcategory, symbols, total_size in sorted_subcategories:
percentage = (total_size / core_total * 100) if core_total > 0 else 0
lines.append(
f"{subcategory:<{self.COL_CORE_SUBCATEGORY}} | {total_size:>{self.COL_CORE_SIZE - 2},} B | "
f"{len(symbols):>{self.COL_CORE_COUNT}} | {percentage:>{self.COL_CORE_PERCENT - 1}.1f}%"
)
# Top 15 largest core symbols
lines.append("")
lines.append(f"Top 15 Largest {_COMPONENT_CORE} Symbols:")
sorted_core_symbols = sorted(
self._esphome_core_symbols, key=lambda x: x[2], reverse=True
)
for i, (symbol, demangled, size) in enumerate(sorted_core_symbols[:15]):
lines.append(f"{i + 1}. {demangled} ({size:,} B)")
lines.append("=" * self.TABLE_WIDTH)
# Add detailed analysis for top ESPHome and external components
esphome_components = [
(name, mem)
for name, mem in components
if name.startswith(_COMPONENT_PREFIX_ESPHOME) and name != _COMPONENT_CORE
]
external_components = [
(name, mem)
for name, mem in components
if name.startswith(_COMPONENT_PREFIX_EXTERNAL)
]
top_esphome_components = sorted(
esphome_components, key=lambda x: x[1].flash_total, reverse=True
)[:30]
# Include all external components (they're usually important)
top_external_components = sorted(
external_components, key=lambda x: x[1].flash_total, reverse=True
)
# Check if API component exists and ensure it's included
api_component = None
for name, mem in components:
if name == _COMPONENT_API:
api_component = (name, mem)
break
# Combine all components to analyze: top ESPHome + all external + API if not already included
components_to_analyze = list(top_esphome_components) + list(
top_external_components
)
if api_component and api_component not in components_to_analyze:
components_to_analyze.append(api_component)
if components_to_analyze:
for comp_name, comp_mem in components_to_analyze:
if not (comp_symbols := self._component_symbols.get(comp_name, [])):
continue
lines.append("")
lines.append("=" * self.TABLE_WIDTH)
lines.append(f"{comp_name} Detailed Analysis".center(self.TABLE_WIDTH))
lines.append("=" * self.TABLE_WIDTH)
lines.append("")
# Sort symbols by size
sorted_symbols = sorted(comp_symbols, key=lambda x: x[2], reverse=True)
lines.append(f"Total symbols: {len(sorted_symbols)}")
lines.append(f"Total size: {comp_mem.flash_total:,} B")
lines.append("")
# Show all symbols > 100 bytes for better visibility
large_symbols = [
(sym, dem, size) for sym, dem, size in sorted_symbols if size > 100
]
lines.append(
f"{comp_name} Symbols > 100 B ({len(large_symbols)} symbols):"
)
for i, (symbol, demangled, size) in enumerate(large_symbols):
lines.append(f"{i + 1}. {demangled} ({size:,} B)")
lines.append("=" * self.TABLE_WIDTH)
return "\n".join(lines)
def dump_uncategorized_symbols(self, output_file: str | None = None) -> None:
"""Dump uncategorized symbols for analysis."""
# Sort by size descending
sorted_symbols = sorted(
self._uncategorized_symbols, key=lambda x: x[2], reverse=True
)
lines = ["Uncategorized Symbols Analysis", "=" * 80]
lines.append(f"Total uncategorized symbols: {len(sorted_symbols)}")
lines.append(
f"Total uncategorized size: {sum(s[2] for s in sorted_symbols):,} bytes"
)
lines.append("")
lines.append(f"{'Size':>10} | {'Symbol':<60} | Demangled")
lines.append("-" * 10 + "-+-" + "-" * 60 + "-+-" + "-" * 40)
for symbol, demangled, size in sorted_symbols[:100]: # Top 100
demangled_display = (
demangled[:100] if symbol != demangled else "[not demangled]"
)
lines.append(f"{size:>10,} | {symbol[:60]:<60} | {demangled_display}")
if len(sorted_symbols) > 100:
lines.append(f"\n... and {len(sorted_symbols) - 100} more symbols")
content = "\n".join(lines)
if output_file:
with open(output_file, "w", encoding="utf-8") as f:
f.write(content)
else:
print(content)
def analyze_elf(
elf_path: str,
objdump_path: str | None = None,
readelf_path: str | None = None,
detailed: bool = False,
external_components: set[str] | None = None,
) -> str:
"""Analyze an ELF file and return a memory report."""
analyzer = MemoryAnalyzerCLI(
elf_path, objdump_path, readelf_path, external_components
)
analyzer.analyze()
return analyzer.generate_report(detailed)
def main():
"""CLI entrypoint for memory analysis."""
if len(sys.argv) < 2:
print("Usage: python -m esphome.analyze_memory <build_directory>")
print("\nAnalyze memory usage from an ESPHome build directory.")
print("The build directory should contain firmware.elf and idedata will be")
print("loaded from ~/.esphome/.internal/idedata/<device>.json")
print("\nExamples:")
print(" python -m esphome.analyze_memory ~/.esphome/build/my-device")
print(" python -m esphome.analyze_memory .esphome/build/my-device")
print(" python -m esphome.analyze_memory my-device # Short form")
sys.exit(1)
build_dir = sys.argv[1]
# Load build directory
import json
from pathlib import Path
from esphome.platformio_api import IDEData
build_path = Path(build_dir)
# If no path separator in name, assume it's a device name
if "/" not in build_dir and not build_path.is_dir():
# Try current directory first
cwd_path = Path.cwd() / ".esphome" / "build" / build_dir
if cwd_path.is_dir():
build_path = cwd_path
print(f"Using build directory: {build_path}", file=sys.stderr)
else:
# Fall back to home directory
build_path = Path.home() / ".esphome" / "build" / build_dir
print(f"Using build directory: {build_path}", file=sys.stderr)
if not build_path.is_dir():
print(f"Error: {build_path} is not a directory", file=sys.stderr)
sys.exit(1)
# Find firmware.elf
elf_file = None
for elf_candidate in [
build_path / "firmware.elf",
build_path / ".pioenvs" / build_path.name / "firmware.elf",
]:
if elf_candidate.exists():
elf_file = str(elf_candidate)
break
if not elf_file:
print(f"Error: firmware.elf not found in {build_dir}", file=sys.stderr)
sys.exit(1)
# Find idedata.json - check current directory first, then home
device_name = build_path.name
idedata_candidates = [
Path.cwd() / ".esphome" / "idedata" / f"{device_name}.json",
Path.home() / ".esphome" / "idedata" / f"{device_name}.json",
]
idedata = None
for idedata_path in idedata_candidates:
if not idedata_path.exists():
continue
try:
with open(idedata_path, encoding="utf-8") as f:
raw_data = json.load(f)
idedata = IDEData(raw_data)
print(f"Loaded idedata from: {idedata_path}", file=sys.stderr)
break
except (json.JSONDecodeError, OSError) as e:
print(f"Warning: Failed to load idedata: {e}", file=sys.stderr)
if not idedata:
print(
f"Warning: idedata not found (searched {idedata_candidates[0]} and {idedata_candidates[1]})",
file=sys.stderr,
)
analyzer = MemoryAnalyzerCLI(elf_file, idedata=idedata)
analyzer.analyze()
report = analyzer.generate_report()
print(report)
if __name__ == "__main__":
main()

View File

@@ -1,903 +0,0 @@
"""Constants for memory analysis symbol pattern matching."""
import re
# Pattern to extract ESPHome component namespaces dynamically
ESPHOME_COMPONENT_PATTERN = re.compile(r"esphome::([a-zA-Z0-9_]+)::")
# Section mapping for ELF file sections
# Maps standard section names to their various platform-specific variants
SECTION_MAPPING = {
".text": frozenset([".text", ".iram"]),
".rodata": frozenset([".rodata"]),
".data": frozenset([".data", ".dram"]),
".bss": frozenset([".bss"]),
}
# Section to ComponentMemory attribute mapping
# Maps section names to the attribute name in ComponentMemory dataclass
SECTION_TO_ATTR = {
".text": "text_size",
".rodata": "rodata_size",
".data": "data_size",
".bss": "bss_size",
}
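
A minimal sketch of how this mapping is typically consumed: the attribute named by `SECTION_TO_ATTR` is updated with `getattr`/`setattr` on a per-component record. `ComponentMemory` below is an illustrative stand-in, not the real dataclass.

```python
# Illustrative only; the real ComponentMemory dataclass is defined elsewhere.
from dataclasses import dataclass

from esphome.analyze_memory.const import SECTION_TO_ATTR  # assumed module path

@dataclass
class ComponentMemory:  # stand-in with the four size attributes named above
    text_size: int = 0
    rodata_size: int = 0
    data_size: int = 0
    bss_size: int = 0

def add_symbol_size(mem: ComponentMemory, section: str, size: int) -> None:
    # Accumulate a symbol's size onto the attribute named by SECTION_TO_ATTR.
    if (attr := SECTION_TO_ATTR.get(section)) is not None:
        setattr(mem, attr, getattr(mem, attr) + size)

mem = ComponentMemory()
add_symbol_size(mem, ".text", 128)  # -> mem.text_size == 128
add_symbol_size(mem, ".bss", 32)    # -> mem.bss_size == 32
```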
# Component identification rules
# Symbol patterns: patterns found in raw symbol names
SYMBOL_PATTERNS = {
"freertos": [
"vTask",
"xTask",
"xQueue",
"pvPort",
"vPort",
"uxTask",
"pcTask",
"prvTimerTask",
"prvAddNewTaskToReadyList",
"pxReadyTasksLists",
"prvAddCurrentTaskToDelayedList",
"xEventGroupWaitBits",
"xRingbufferSendFromISR",
"prvSendItemDoneNoSplit",
"prvReceiveGeneric",
"prvSendAcquireGeneric",
"prvCopyItemAllowSplit",
"xEventGroup",
"xRingbuffer",
"prvSend",
"prvReceive",
"prvCopy",
"xPort",
"ulTaskGenericNotifyTake",
"prvIdleTask",
"prvInitialiseNewTask",
"prvIsYieldRequiredSMP",
"prvGetItemByteBuf",
"prvInitializeNewRingbuffer",
"prvAcquireItemNoSplit",
"prvNotifyQueueSetContainer",
"ucStaticTimerQueueStorage",
"eTaskGetState",
"main_task",
"do_system_init_fn",
"xSemaphoreCreateGenericWithCaps",
"vListInsert",
"uxListRemove",
"vRingbufferReturnItem",
"vRingbufferReturnItemFromISR",
"prvCheckItemFitsByteBuffer",
"prvGetCurMaxSizeAllowSplit",
"tick_hook",
"sys_sem_new",
"sys_arch_mbox_fetch",
"sys_arch_sem_wait",
"prvDeleteTCB",
"vQueueDeleteWithCaps",
"vRingbufferDeleteWithCaps",
"vSemaphoreDeleteWithCaps",
"prvCheckItemAvail",
"prvCheckTaskCanBeScheduledSMP",
"prvGetCurMaxSizeNoSplit",
"prvResetNextTaskUnblockTime",
"prvReturnItemByteBuf",
"vApplicationStackOverflowHook",
"vApplicationGetIdleTaskMemory",
"sys_init",
"sys_mbox_new",
"sys_arch_mbox_tryfetch",
],
"xtensa": ["xt_", "_xt_", "xPortEnterCriticalTimeout"],
"heap": ["heap_", "multi_heap"],
"spi_flash": ["spi_flash"],
"rtc": ["rtc_", "rtcio_ll_"],
"gpio_driver": ["gpio_", "pins"],
"uart_driver": ["uart", "_uart", "UART"],
"timer": ["timer_", "esp_timer"],
"peripherals": ["periph_", "periman"],
"network_stack": [
"vj_compress",
"raw_sendto",
"raw_input",
"etharp_",
"icmp_input",
"socket_ipv6",
"ip_napt",
"socket_ipv4_multicast",
"socket_ipv6_multicast",
"netconn_",
"recv_raw",
"accept_function",
"netconn_recv_data",
"netconn_accept",
"netconn_write_vectors_partly",
"netconn_drain",
"raw_connect",
"raw_bind",
"icmp_send_response",
"sockets",
"icmp_dest_unreach",
"inet_chksum_pseudo",
"alloc_socket",
"done_socket",
"set_global_fd_sets",
"inet_chksum_pbuf",
"tryget_socket_unconn_locked",
"tryget_socket_unconn",
"cs_create_ctrl_sock",
"netbuf_alloc",
],
"ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"],
"wifi_stack": [
"ieee80211",
"hostap",
"sta_",
"ap_",
"scan_",
"wifi_",
"wpa_",
"wps_",
"esp_wifi",
"cnx_",
"wpa3_",
"sae_",
"wDev_",
"ic_",
"mac_",
"esf_buf",
"gWpaSm",
"sm_WPA",
"eapol_",
"owe_",
"wifiLowLevelInit",
"s_do_mapping",
"gScanStruct",
"ppSearchTxframe",
"ppMapWaitTxq",
"ppFillAMPDUBar",
"ppCheckTxConnTrafficIdle",
"ppCalTkipMic",
],
"bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"],
"wifi_bt_coex": ["coex"],
"bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"],
"bluedroid_bt": [
"bluedroid",
"btc_",
"bta_",
"btm_",
"btu_",
"BTM_",
"GATT",
"L2CA_",
"smp_",
"gatts_",
"attp_",
"l2cu_",
"l2cb",
"smp_cb",
"BTA_GATTC_",
"SMP_",
"BTU_",
"BTA_Dm",
"GAP_Ble",
"BT_tx_if",
"host_recv_pkt_cb",
"saved_local_oob_data",
"string_to_bdaddr",
"string_is_bdaddr",
"CalConnectParamTimeout",
"transmit_fragment",
"transmit_data",
"event_command_ready",
"read_command_complete_header",
"parse_read_local_extended_features_response",
"parse_read_local_version_info_response",
"should_request_high",
"btdm_wakeup_request",
"BTA_SetAttributeValue",
"BTA_EnableBluetooth",
"transmit_command_futured",
"transmit_command",
"get_waiting_command",
"make_command",
"transmit_downward",
"host_recv_adv_packet",
"copy_extra_byte_in_db",
"parse_read_local_supported_commands_response",
],
"crypto_math": [
"ecp_",
"bignum_",
"mpi_",
"sswu",
"modp",
"dragonfly_",
"gcm_mult",
"__multiply",
"quorem",
"__mdiff",
"__lshift",
"__mprec_tens",
"ECC_",
"multiprecision_",
"mix_sub_columns",
"sbox",
"gfm2_sbox",
"gfm3_sbox",
"curve_p256",
"curve",
"p_256_init_curve",
"shift_sub_rows",
"rshift",
],
"hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"],
"libc": [
"printf",
"scanf",
"malloc",
"free",
"memcpy",
"memset",
"strcpy",
"strlen",
"_dtoa",
"_fopen",
"__sfvwrite_r",
"qsort",
"__sf",
"__sflush_r",
"__srefill_r",
"_impure_data",
"_reclaim_reent",
"_open_r",
"strncpy",
"_strtod_l",
"__gethex",
"__hexnan",
"_setenv_r",
"_tzset_unlocked_r",
"__tzcalc_limits",
"select",
"scalbnf",
"strtof",
"strtof_l",
"__d2b",
"__b2d",
"__s2b",
"_Balloc",
"__multadd",
"__lo0bits",
"__atexit0",
"__smakebuf_r",
"__swhatbuf_r",
"_sungetc_r",
"_close_r",
"_link_r",
"_unsetenv_r",
"_rename_r",
"__month_lengths",
"tzinfo",
"__ratio",
"__hi0bits",
"__ulp",
"__any_on",
"__copybits",
"L_shift",
"_fcntl_r",
"_lseek_r",
"_read_r",
"_write_r",
"_unlink_r",
"_fstat_r",
"access",
"fsync",
"tcsetattr",
"tcgetattr",
"tcflush",
"tcdrain",
"__ssrefill_r",
"_stat_r",
"__hexdig_fun",
"__mcmp",
"_fwalk_sglue",
"__fpclassifyf",
"_setlocale_r",
"_mbrtowc_r",
"fcntl",
"__match",
"_lock_close",
"__c$",
"__func__$",
"__FUNCTION__$",
"DAYS_IN_MONTH",
"_DAYS_BEFORE_MONTH",
"CSWTCH$",
"dst$",
"sulp",
],
"string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"],
"memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"],
"file_io": [
"fread",
"fwrite",
"fopen",
"fclose",
"fseek",
"ftell",
"fflush",
"s_fd_table",
],
"string_formatting": [
"snprintf",
"vsnprintf",
"sprintf",
"vsprintf",
"sscanf",
"vsscanf",
],
"cpp_anonymous": ["_GLOBAL__N_", "n$"],
"cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"],
"exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"],
"static_init": ["_GLOBAL__sub_I_"],
"mdns_lib": ["mdns"],
"phy_radio": [
"phy_",
"rf_",
"chip_",
"register_chipv7",
"pbus_",
"bb_",
"fe_",
"rfcal_",
"ram_rfcal",
"tx_pwctrl",
"rx_chan",
"set_rx_gain",
"set_chan",
"agc_reg",
"ram_txiq",
"ram_txdc",
"ram_gen_rx_gain",
"rx_11b_opt",
"set_rx_sense",
"set_rx_gain_cal",
"set_chan_dig_gain",
"tx_pwctrl_init_cal",
"rfcal_txiq",
"set_tx_gain_table",
"correct_rfpll_offset",
"pll_correct_dcap",
"txiq_cal_init",
"pwdet_sar",
"pwdet_sar2_init",
"ram_iq_est_enable",
"ram_rfpll_set_freq",
"ant_wifirx_cfg",
"ant_btrx_cfg",
"force_txrxoff",
"force_txrx_off",
"tx_paon_set",
"opt_11b_resart",
"rfpll_1p2_opt",
"ram_dc_iq_est",
"ram_start_tx_tone",
"ram_en_pwdet",
"ram_cbw2040_cfg",
"rxdc_est_min",
"i2cmst_reg_init",
"temprature_sens_read",
"ram_restart_cal",
"ram_write_gain_mem",
"ram_wait_rfpll_cal_end",
"txcal_debuge_mode",
"ant_wifitx_cfg",
"reg_init_begin",
],
"wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"],
"wifi_lmac": ["lmac"],
"wifi_device": ["wdev", "wDev_"],
"power_mgmt": [
"pm_",
"sleep",
"rtc_sleep",
"light_sleep",
"deep_sleep",
"power_down",
"g_pm",
],
"memory_mgmt": [
"mem_",
"memory_",
"tlsf_",
"memp_",
"pbuf_",
"pbuf_alloc",
"pbuf_copy_partial_pbuf",
],
"hal_layer": ["hal_"],
"clock_mgmt": [
"clk_",
"clock_",
"rtc_clk",
"apb_",
"cpu_freq",
"setCpuFrequencyMhz",
],
"cache_mgmt": ["cache"],
"flash_ops": ["flash", "image_load"],
"interrupt_handlers": [
"isr",
"interrupt",
"intr_",
"exc_",
"exception",
"port_IntStack",
],
"wrapper_functions": ["_wrapper"],
"error_handling": ["panic", "abort", "assert", "error_", "fault"],
"authentication": ["auth"],
"ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"],
"dhcp": ["dhcp", "handle_dhcp"],
"ethernet_phy": [
"emac_",
"eth_phy_",
"phy_tlk110",
"phy_lan87",
"phy_ip101",
"phy_rtl",
"phy_dp83",
"phy_ksz",
"lan87xx_",
"rtl8201_",
"ip101_",
"ksz80xx_",
"jl1101_",
"dp83848_",
"eth_on_state_changed",
],
"threading": ["pthread_", "thread_", "_task_"],
"pthread": ["pthread"],
"synchronization": ["mutex", "semaphore", "spinlock", "portMUX"],
"math_lib": [
"sin",
"cos",
"tan",
"sqrt",
"pow",
"exp",
"log",
"atan",
"asin",
"acos",
"floor",
"ceil",
"fabs",
"round",
],
"random": ["rand", "random", "rng_", "prng"],
"time_lib": [
"time",
"clock",
"gettimeofday",
"settimeofday",
"localtime",
"gmtime",
"mktime",
"strftime",
],
"console_io": ["console_", "uart_tx", "uart_rx", "puts", "putchar", "getchar"],
"rom_functions": ["r_", "rom_"],
"compiler_runtime": [
"__divdi3",
"__udivdi3",
"__moddi3",
"__muldi3",
"__ashldi3",
"__ashrdi3",
"__lshrdi3",
"__cmpdi2",
"__fixdfdi",
"__floatdidf",
],
"libgcc": ["libgcc", "_divdi3", "_udivdi3"],
"boot_startup": ["boot", "start_cpu", "call_start", "startup", "bootloader"],
"bootloader": ["bootloader_", "esp_bootloader"],
"app_framework": ["app_", "initArduino", "setup", "loop", "Update"],
"weak_symbols": ["__weak_"],
"compiler_builtins": ["__builtin_"],
"vfs": ["vfs_", "VFS"],
"esp32_sdk": ["esp32_", "esp32c", "esp32s"],
"usb": ["usb_", "USB", "cdc_", "CDC"],
"i2c_driver": ["i2c_", "I2C"],
"i2s_driver": ["i2s_", "I2S"],
"spi_driver": ["spi_", "SPI"],
"adc_driver": ["adc_", "ADC"],
"dac_driver": ["dac_", "DAC"],
"touch_driver": ["touch_", "TOUCH"],
"pwm_driver": ["pwm_", "PWM", "ledc_", "LEDC"],
"rmt_driver": ["rmt_", "RMT"],
"pcnt_driver": ["pcnt_", "PCNT"],
"can_driver": ["can_", "CAN", "twai_", "TWAI"],
"sdmmc_driver": ["sdmmc_", "SDMMC", "sdcard", "sd_card"],
"temp_sensor": ["temp_sensor", "tsens_"],
"watchdog": ["wdt_", "WDT", "watchdog"],
"brownout": ["brownout", "bod_"],
"ulp": ["ulp_", "ULP"],
"psram": ["psram", "PSRAM", "spiram", "SPIRAM"],
"efuse": ["efuse", "EFUSE"],
"partition": ["partition", "esp_partition"],
"esp_event": ["esp_event", "event_loop", "event_callback"],
"esp_console": ["esp_console", "console_"],
"chip_specific": ["chip_", "esp_chip"],
"esp_system_utils": ["esp_system", "esp_hw", "esp_clk", "esp_sleep"],
"ipc": ["esp_ipc", "ipc_"],
"wifi_config": [
"g_cnxMgr",
"gChmCxt",
"g_ic",
"TxRxCxt",
"s_dp",
"s_ni",
"s_reg_dump",
"packet$",
"d_mult_table",
"K",
"fcstab",
],
"smartconfig": ["sc_ack_send"],
"rc_calibration": ["rc_cal", "rcUpdate"],
"noise_floor": ["noise_check"],
"rf_calibration": [
"set_rx_sense",
"set_rx_gain_cal",
"set_chan_dig_gain",
"tx_pwctrl_init_cal",
"rfcal_txiq",
"set_tx_gain_table",
"correct_rfpll_offset",
"pll_correct_dcap",
"txiq_cal_init",
"pwdet_sar",
"rx_11b_opt",
],
"wifi_crypto": [
"pk_use_ecparams",
"process_segments",
"ccmp_",
"rc4_",
"aria_",
"mgf_mask",
"dh_group",
"ccmp_aad_nonce",
"ccmp_encrypt",
"rc4_skip",
"aria_sb1",
"aria_sb2",
"aria_is1",
"aria_is2",
"aria_sl",
"aria_a",
],
"radio_control": ["fsm_input", "fsm_sconfreq"],
"pbuf": [
"pbuf_",
],
"event_group": ["xEventGroup"],
"ringbuffer": ["xRingbuffer", "prvSend", "prvReceive", "prvCopy"],
"provisioning": ["prov_", "prov_stop_and_notify"],
"scan": ["gScanStruct"],
"port": ["xPort"],
"elf_loader": [
"elf_add",
"elf_add_note",
"elf_add_segment",
"process_image",
"read_encoded",
"read_encoded_value",
"read_encoded_value_with_base",
"process_image_header",
],
"socket_api": [
"sockets",
"netconn_",
"accept_function",
"recv_raw",
"socket_ipv4_multicast",
"socket_ipv6_multicast",
],
"igmp": ["igmp_", "igmp_send", "igmp_input"],
"icmp6": ["icmp6_"],
"arp": ["arp_table"],
"ampdu": [
"ampdu_",
"rcAmpdu",
"trc_onAmpduOp",
"rcAmpduLowerRate",
"ampdu_dispatch_upto",
],
"ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"],
"rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"],
"nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"],
"channel_mgmt": ["chm_init", "chm_set_current_channel"],
"trace": ["trc_init", "trc_onAmpduOp"],
"country_code": ["country_info", "country_info_24ghz"],
"multicore": ["do_multicore_settings"],
"Update_lib": ["Update"],
"stdio": [
"__sf",
"__sflush_r",
"__srefill_r",
"_impure_data",
"_reclaim_reent",
"_open_r",
],
"strncpy_ops": ["strncpy"],
"math_internal": ["__mdiff", "__lshift", "__mprec_tens", "quorem"],
"character_class": ["__chclass"],
"camellia": ["camellia_", "camellia_feistel"],
"crypto_tables": ["FSb", "FSb2", "FSb3", "FSb4"],
"event_buffer": ["g_eb_list_desc", "eb_space"],
"base_node": ["base_node_", "base_node_add_handler"],
"file_descriptor": ["s_fd_table"],
"tx_delay": ["tx_delay_cfg"],
"deinit": ["deinit_functions"],
"lcp_echo": ["LcpEchoCheck"],
"raw_api": ["raw_bind", "raw_connect"],
"checksum": ["process_checksum"],
"entry_management": ["add_entry"],
"esp_ota": ["esp_ota", "ota_", "read_otadata"],
"http_server": [
"httpd_",
"parse_url_char",
"cb_headers_complete",
"delete_entry",
"validate_structure",
"config_save",
"config_new",
"verify_url",
"cb_url",
],
"misc_system": [
"alarm_cbs",
"start_up",
"tokens",
"unhex",
"osi_funcs_ro",
"enum_function",
"fragment_and_dispatch",
"alarm_set",
"osi_alarm_new",
"config_set_string",
"config_update_newest_section",
"config_remove_key",
"method_strings",
"interop_match",
"interop_database",
"__state_table",
"__action_table",
"s_stub_table",
"s_context",
"s_mmu_ctx",
"s_get_bus_mask",
"hli_queue_put",
"list_remove",
"list_delete",
"lock_acquire_generic",
"is_vect_desc_usable",
"io_mode_str",
"__c$20233",
"interface",
"read_id_core",
"subscribe_idle",
"unsubscribe_idle",
"s_clkout_handle",
"lock_release_generic",
"config_set_int",
"config_get_int",
"config_get_string",
"config_has_key",
"config_remove_section",
"osi_alarm_init",
"osi_alarm_deinit",
"fixed_queue_enqueue",
"fixed_queue_dequeue",
"fixed_queue_new",
"fixed_pkt_queue_enqueue",
"fixed_pkt_queue_new",
"list_append",
"list_prepend",
"list_insert_after",
"list_contains",
"list_get_node",
"hash_function_blob",
"cb_no_body",
"cb_on_body",
"profile_tab",
"get_arg",
"trim",
"buf$",
"process_appended_hash_and_sig$constprop$0",
"uuidType",
"allocate_svc_db_buf",
"_hostname_is_ours",
"s_hli_handlers",
"tick_cb",
"idle_cb",
"input",
"entry_find",
"section_find",
"find_bucket_entry_",
"config_has_section",
"hli_queue_create",
"hli_queue_get",
"hli_c_handler",
"future_ready",
"future_await",
"future_new",
"pkt_queue_enqueue",
"pkt_queue_dequeue",
"pkt_queue_cleanup",
"pkt_queue_create",
"pkt_queue_destroy",
"fixed_pkt_queue_dequeue",
"osi_alarm_cancel",
"osi_alarm_is_active",
"osi_sem_take",
"osi_event_create",
"osi_event_bind",
"alarm_cb_handler",
"list_foreach",
"list_back",
"list_front",
"list_clear",
"fixed_queue_try_peek_first",
"translate_path",
"get_idx",
"find_key",
"init",
"end",
"start",
"set_read_value",
"copy_address_list",
"copy_and_key",
"sdk_cfg_opts",
"leftshift_onebit",
"config_section_end",
"config_section_begin",
"find_entry_and_check_all_reset",
"image_validate",
"xPendingReadyList",
"vListInitialise",
"lock_init_generic",
"ant_bttx_cfg",
"ant_dft_cfg",
"cs_send_to_ctrl_sock",
"config_llc_util_funcs_reset",
"make_set_adv_report_flow_control",
"make_set_event_mask",
"raw_new",
"raw_remove",
"BTE_InitStack",
"parse_read_local_supported_features_response",
"__math_invalidf",
"tinytens",
"__mprec_tinytens",
"__mprec_bigtens",
"vRingbufferDelete",
"vRingbufferDeleteWithCaps",
"vRingbufferReturnItem",
"vRingbufferReturnItemFromISR",
"get_acl_data_size_ble",
"get_features_ble",
"get_features_classic",
"get_acl_packet_size_ble",
"get_acl_packet_size_classic",
"supports_extended_inquiry_response",
"supports_rssi_with_inquiry_results",
"supports_interlaced_inquiry_scan",
"supports_reading_remote_extended_features",
],
"bluetooth_ll": [
"lld_pdu_",
"ld_acl_",
"lld_stop_ind_handler",
"lld_evt_winsize_change",
"config_lld_evt_funcs_reset",
"config_lld_funcs_reset",
"config_llm_funcs_reset",
"llm_set_long_adv_data",
"lld_retry_tx_prog",
"llc_link_sup_to_ind_handler",
"config_llc_funcs_reset",
"lld_evt_rxwin_compute",
"config_btdm_funcs_reset",
"config_ea_funcs_reset",
"llc_defalut_state_tab_reset",
"config_rwip_funcs_reset",
"ke_lmp_rx_flooding_detect",
],
}
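
To make the table concrete, here is a small sketch of the substring convention it encodes; the production matcher applies its own precedence rules, so this only illustrates the intent.

```python
# Illustrative only: bucket a raw (mangled) symbol name by substring matching
# against SYMBOL_PATTERNS. Dict insertion order decides ties in this sketch.
from esphome.analyze_memory.const import SYMBOL_PATTERNS  # assumed module path

def guess_bucket(symbol_name: str) -> str | None:
    for bucket, patterns in SYMBOL_PATTERNS.items():
        if any(pattern in symbol_name for pattern in patterns):
            return bucket
    return None

print(guess_bucket("vTaskDelayUntil"))           # -> "freertos"
print(guess_bucket("ieee80211_output_process"))  # -> "wifi_stack"
print(guess_bucket("totally_unknown_symbol"))    # -> None
```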
# Demangled patterns: patterns found in demangled C++ names
DEMANGLED_PATTERNS = {
"gpio_driver": ["GPIO"],
"uart_driver": ["UART"],
"network_stack": [
"lwip",
"tcp",
"udp",
"ip4",
"ip6",
"dhcp",
"dns",
"netif",
"ethernet",
"ppp",
"slip",
],
"wifi_stack": ["NetworkInterface"],
"nimble_bt": [
"nimble",
"NimBLE",
"ble_hs",
"ble_gap",
"ble_gatt",
"ble_att",
"ble_l2cap",
"ble_sm",
],
"crypto": ["mbedtls", "crypto", "sha", "aes", "rsa", "ecc", "tls", "ssl"],
"cpp_stdlib": ["std::", "__gnu_cxx::", "__cxxabiv"],
"static_init": ["__static_initialization"],
"rtti": ["__type_info", "__class_type_info"],
"web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"],
"async_tcp": ["AsyncClient", "AsyncServer"],
"mdns_lib": ["mdns"],
"json_lib": [
"ArduinoJson",
"JsonDocument",
"JsonArray",
"JsonObject",
"deserialize",
"serialize",
],
"http_lib": ["HTTP", "http_", "Request", "Response", "Uri", "WebSocket"],
"logging": ["log", "Log", "print", "Print", "diag_"],
"authentication": ["checkDigestAuthentication"],
"libgcc": ["libgcc"],
"esp_system": ["esp_", "ESP"],
"arduino": ["arduino"],
"nvs": ["nvs_", "_ZTVN3nvs", "nvs::"],
"filesystem": ["spiffs", "vfs"],
"libc": ["newlib"],
}
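
And the demangled side, combining the `ESPHOME_COMPONENT_PATTERN` regex defined at the top of this file with the buckets above. This is one plausible ordering, assuming namespace matches take precedence over the generic buckets; the real analyzer may differ.

```python
# Illustrative only: classify a demangled C++ name.
from esphome.analyze_memory.const import (  # assumed module path
    DEMANGLED_PATTERNS,
    ESPHOME_COMPONENT_PATTERN,
)

def classify_demangled(demangled: str) -> str | None:
    # ESPHome components are recognised from their C++ namespace, e.g.
    # "esphome::wifi::WiFiComponent::loop()" -> "wifi".
    if match := ESPHOME_COMPONENT_PATTERN.search(demangled):
        return match.group(1)
    # Everything else falls back to the DEMANGLED_PATTERNS buckets.
    for bucket, patterns in DEMANGLED_PATTERNS.items():
        if any(pattern in demangled for pattern in patterns):
            return bucket
    return None

print(classify_demangled("esphome::wifi::WiFiComponent::loop()"))  # -> "wifi"
print(classify_demangled("std::__cxx11::basic_string<char>"))      # -> "cpp_stdlib"
```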
# Patterns for categorizing ESPHome core symbols into subcategories
CORE_SUBCATEGORY_PATTERNS = {
"Component Framework": ["Component"],
"Application Core": ["Application"],
"Scheduler": ["Scheduler"],
"Component Iterator": ["ComponentIterator"],
"Helper Functions": ["Helpers", "helpers"],
"Preferences/Storage": ["Preferences", "ESPPreferences"],
"I/O Utilities": ["HighFrequencyLoopRequester"],
"String Utilities": ["str_"],
"Bit Utilities": ["reverse_bits"],
"Data Conversion": ["convert_"],
"Network Utilities": ["network", "IPAddress"],
"API Protocol": ["api::"],
"WiFi Manager": ["wifi::"],
"MQTT Client": ["mqtt::"],
"Logger": ["logger::"],
"OTA Updates": ["ota::"],
"Web Server": ["web_server::"],
"Time Management": ["time::"],
"Sensor Framework": ["sensor::"],
"Binary Sensor": ["binary_sensor::"],
"Switch Framework": ["switch_::"],
"Light Framework": ["light::"],
"Climate Framework": ["climate::"],
"Cover Framework": ["cover::"],
}

View File

@@ -1,121 +0,0 @@
"""Helper functions for memory analysis."""
from functools import cache
from pathlib import Path
from .const import SECTION_MAPPING
# Namespace constant duplicated from the parent module:
# importing it there at module level would create a circular import,
# so it is defined locally here as well.
_NAMESPACE_ESPHOME = "esphome::"
# Get the list of actual ESPHome components by scanning the components directory
@cache
def get_esphome_components():
"""Get set of actual ESPHome components from the components directory."""
# Find the components directory relative to this file
# Go up two levels from analyze_memory/helpers.py to esphome/
current_dir = Path(__file__).parent.parent
components_dir = current_dir / "components"
if not components_dir.exists() or not components_dir.is_dir():
return frozenset()
return frozenset(
item.name
for item in components_dir.iterdir()
if item.is_dir()
and not item.name.startswith(".")
and not item.name.startswith("__")
)
@cache
def get_component_class_patterns(component_name: str) -> list[str]:
"""Generate component class name patterns for symbol matching.
Args:
component_name: The component name (e.g., "ota", "wifi", "api")
Returns:
List of pattern strings to match against demangled symbols
"""
component_upper = component_name.upper()
component_camel = component_name.replace("_", "").title()
return [
f"{_NAMESPACE_ESPHOME}{component_upper}Component", # e.g., esphome::OTAComponent
f"{_NAMESPACE_ESPHOME}ESPHome{component_upper}Component", # e.g., esphome::ESPHomeOTAComponent
f"{_NAMESPACE_ESPHOME}{component_camel}Component", # e.g., esphome::OtaComponent
f"{_NAMESPACE_ESPHOME}ESPHome{component_camel}Component", # e.g., esphome::ESPHomeOtaComponent
]
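
A quick illustration of what this helper yields and how the patterns are typically matched against a demangled symbol; the values follow directly from the f-strings above.

```python
from esphome.analyze_memory.helpers import get_component_class_patterns  # assumed path

patterns = get_component_class_patterns("ota")
# -> ["esphome::OTAComponent", "esphome::ESPHomeOTAComponent",
#     "esphome::OtaComponent", "esphome::ESPHomeOtaComponent"]

demangled = "esphome::ESPHomeOTAComponent::setup()"
print(any(p in demangled for p in patterns))  # -> True
```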
def map_section_name(raw_section: str) -> str | None:
"""Map raw section name to standard section.
Args:
raw_section: Raw section name from ELF file (e.g., ".iram0.text", ".rodata.str1.1")
Returns:
Standard section name (".text", ".rodata", ".data", ".bss") or None
"""
for standard_section, patterns in SECTION_MAPPING.items():
if any(pattern in raw_section for pattern in patterns):
return standard_section
return None
def parse_symbol_line(line: str) -> tuple[str, str, int, str] | None:
"""Parse a single symbol line from objdump output.
Args:
line: Line from objdump -t output
Returns:
Tuple of (section, name, size, address) or None if not a valid symbol.
Format: address l/g w/d F/O section size name
Example: 40084870 l F .iram0.text 00000000 _xt_user_exc
"""
parts = line.split()
if len(parts) < 5:
return None
try:
# Validate and extract address
address = parts[0]
int(address, 16)
except ValueError:
return None
# Look for F (function) or O (object) flag
if "F" not in parts and "O" not in parts:
return None
# Find section, size, and name
for i, part in enumerate(parts):
if not part.startswith("."):
continue
section = map_section_name(part)
if not section:
break
# Need at least size field after section
if i + 1 >= len(parts):
break
try:
size = int(parts[i + 1], 16)
except ValueError:
break
# Need symbol name and non-zero size
if i + 2 >= len(parts) or size == 0:
break
name = " ".join(parts[i + 2 :])
return (section, name, size, address)
return None
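
A hedged usage sketch of the two helpers above, using a made-up objdump line; note that zero-size entries, like the docstring's `_xt_user_exc` example, intentionally return `None`.

```python
from esphome.analyze_memory.helpers import (  # assumed module path
    map_section_name,
    parse_symbol_line,
)

line = "40084870 l     F .iram0.text  0000012c  _xt_medint2_handler"  # invented line
print(parse_symbol_line(line))
# -> (".text", "_xt_medint2_handler", 300, "40084870")
#    ".iram0.text" maps to ".text" via map_section_name(); 0x12c == 300 bytes.

print(map_section_name(".rodata.str1.1"))  # -> ".rodata"
print(map_section_name(".debug_info"))     # -> None (not a tracked section)
```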

View File

@@ -15,10 +15,7 @@ from esphome.const import (
CONF_TYPE_ID,
CONF_UPDATE_INTERVAL,
)
from esphome.core import ID
from esphome.cpp_generator import MockObj, MockObjClass, TemplateArgsType
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
from esphome.types import ConfigType
from esphome.util import Registry
@@ -52,11 +49,11 @@ def maybe_conf(conf, *validators):
return validate
def register_action(name: str, action_type: MockObjClass, schema: cv.Schema):
def register_action(name, action_type, schema):
return ACTION_REGISTRY.register(name, action_type, schema)
def register_condition(name: str, condition_type: MockObjClass, schema: cv.Schema):
def register_condition(name, condition_type, schema):
return CONDITION_REGISTRY.register(name, condition_type, schema)
@@ -167,78 +164,43 @@ XorCondition = cg.esphome_ns.class_("XorCondition", Condition)
@register_condition("and", AndCondition, validate_condition_list)
async def and_condition_to_code(
config: ConfigType,
condition_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def and_condition_to_code(config, condition_id, template_arg, args):
conditions = await build_condition_list(config, template_arg, args)
return cg.new_Pvariable(condition_id, template_arg, conditions)
@register_condition("or", OrCondition, validate_condition_list)
async def or_condition_to_code(
config: ConfigType,
condition_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def or_condition_to_code(config, condition_id, template_arg, args):
conditions = await build_condition_list(config, template_arg, args)
return cg.new_Pvariable(condition_id, template_arg, conditions)
@register_condition("all", AndCondition, validate_condition_list)
async def all_condition_to_code(
config: ConfigType,
condition_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def all_condition_to_code(config, condition_id, template_arg, args):
conditions = await build_condition_list(config, template_arg, args)
return cg.new_Pvariable(condition_id, template_arg, conditions)
@register_condition("any", OrCondition, validate_condition_list)
async def any_condition_to_code(
config: ConfigType,
condition_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def any_condition_to_code(config, condition_id, template_arg, args):
conditions = await build_condition_list(config, template_arg, args)
return cg.new_Pvariable(condition_id, template_arg, conditions)
@register_condition("not", NotCondition, validate_potentially_and_condition)
async def not_condition_to_code(
config: ConfigType,
condition_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def not_condition_to_code(config, condition_id, template_arg, args):
condition = await build_condition(config, template_arg, args)
return cg.new_Pvariable(condition_id, template_arg, condition)
@register_condition("xor", XorCondition, validate_condition_list)
async def xor_condition_to_code(
config: ConfigType,
condition_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def xor_condition_to_code(config, condition_id, template_arg, args):
conditions = await build_condition_list(config, template_arg, args)
return cg.new_Pvariable(condition_id, template_arg, conditions)
@register_condition("lambda", LambdaCondition, cv.returning_lambda)
async def lambda_condition_to_code(
config: ConfigType,
condition_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def lambda_condition_to_code(config, condition_id, template_arg, args):
lambda_ = await cg.process_lambda(config, args, return_type=bool)
return cg.new_Pvariable(condition_id, template_arg, lambda_)
@@ -255,12 +217,7 @@ async def lambda_condition_to_code(
}
).extend(cv.COMPONENT_SCHEMA),
)
async def for_condition_to_code(
config: ConfigType,
condition_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def for_condition_to_code(config, condition_id, template_arg, args):
condition = await build_condition(
config[CONF_CONDITION], cg.TemplateArguments(), []
)
@@ -274,12 +231,7 @@ async def for_condition_to_code(
@register_action(
"delay", DelayAction, cv.templatable(cv.positive_time_period_milliseconds)
)
async def delay_action_to_code(
config: ConfigType,
action_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def delay_action_to_code(config, action_id, template_arg, args):
var = cg.new_Pvariable(action_id, template_arg)
await cg.register_component(var, {})
template_ = await cg.templatable(config, args, cg.uint32)
@@ -304,15 +256,10 @@ async def delay_action_to_code(
cv.has_at_least_one_key(CONF_CONDITION, CONF_ANY, CONF_ALL),
),
)
async def if_action_to_code(
config: ConfigType,
action_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def if_action_to_code(config, action_id, template_arg, args):
cond_conf = next(el for el in config if el in (CONF_ANY, CONF_ALL, CONF_CONDITION))
condition = await build_condition(config[cond_conf], template_arg, args)
var = cg.new_Pvariable(action_id, template_arg, condition)
conditions = await build_condition(config[cond_conf], template_arg, args)
var = cg.new_Pvariable(action_id, template_arg, conditions)
if CONF_THEN in config:
actions = await build_action_list(config[CONF_THEN], template_arg, args)
cg.add(var.add_then(actions))
@@ -332,14 +279,9 @@ async def if_action_to_code(
}
),
)
async def while_action_to_code(
config: ConfigType,
action_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
condition = await build_condition(config[CONF_CONDITION], template_arg, args)
var = cg.new_Pvariable(action_id, template_arg, condition)
async def while_action_to_code(config, action_id, template_arg, args):
conditions = await build_condition(config[CONF_CONDITION], template_arg, args)
var = cg.new_Pvariable(action_id, template_arg, conditions)
actions = await build_action_list(config[CONF_THEN], template_arg, args)
cg.add(var.add_then(actions))
return var
@@ -355,12 +297,7 @@ async def while_action_to_code(
}
),
)
async def repeat_action_to_code(
config: ConfigType,
action_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def repeat_action_to_code(config, action_id, template_arg, args):
var = cg.new_Pvariable(action_id, template_arg)
count_template = await cg.templatable(config[CONF_COUNT], args, cg.uint32)
cg.add(var.set_count(count_template))
@@ -383,14 +320,9 @@ _validate_wait_until = cv.maybe_simple_value(
@register_action("wait_until", WaitUntilAction, _validate_wait_until)
async def wait_until_action_to_code(
config: ConfigType,
action_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
condition = await build_condition(config[CONF_CONDITION], template_arg, args)
var = cg.new_Pvariable(action_id, template_arg, condition)
async def wait_until_action_to_code(config, action_id, template_arg, args):
conditions = await build_condition(config[CONF_CONDITION], template_arg, args)
var = cg.new_Pvariable(action_id, template_arg, conditions)
if CONF_TIMEOUT in config:
template_ = await cg.templatable(config[CONF_TIMEOUT], args, cg.uint32)
cg.add(var.set_timeout_value(template_))
@@ -399,12 +331,7 @@ async def wait_until_action_to_code(
@register_action("lambda", LambdaAction, cv.lambda_)
async def lambda_action_to_code(
config: ConfigType,
action_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def lambda_action_to_code(config, action_id, template_arg, args):
lambda_ = await cg.process_lambda(config, args, return_type=cg.void)
return cg.new_Pvariable(action_id, template_arg, lambda_)
@@ -418,12 +345,7 @@ async def lambda_action_to_code(
}
),
)
async def component_update_action_to_code(
config: ConfigType,
action_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def component_update_action_to_code(config, action_id, template_arg, args):
comp = await cg.get_variable(config[CONF_ID])
return cg.new_Pvariable(action_id, template_arg, comp)
@@ -437,12 +359,7 @@ async def component_update_action_to_code(
}
),
)
async def component_suspend_action_to_code(
config: ConfigType,
action_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def component_suspend_action_to_code(config, action_id, template_arg, args):
comp = await cg.get_variable(config[CONF_ID])
return cg.new_Pvariable(action_id, template_arg, comp)
@@ -459,12 +376,7 @@ async def component_suspend_action_to_code(
}
),
)
async def component_resume_action_to_code(
config: ConfigType,
action_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
) -> MockObj:
async def component_resume_action_to_code(config, action_id, template_arg, args):
comp = await cg.get_variable(config[CONF_ID])
var = cg.new_Pvariable(action_id, template_arg, comp)
if CONF_UPDATE_INTERVAL in config:
@@ -473,9 +385,7 @@ async def component_resume_action_to_code(
return var
async def build_action(
full_config: ConfigType, template_arg: cg.TemplateArguments, args: TemplateArgsType
) -> MockObj:
async def build_action(full_config, template_arg, args):
registry_entry, config = cg.extract_registry_entry_config(
ACTION_REGISTRY, full_config
)
@@ -484,19 +394,15 @@ async def build_action(
return await builder(config, action_id, template_arg, args)
async def build_action_list(
config: list[ConfigType], templ: cg.TemplateArguments, arg_type: TemplateArgsType
) -> list[MockObj]:
actions: list[MockObj] = []
async def build_action_list(config, templ, arg_type):
actions = []
for conf in config:
action = await build_action(conf, templ, arg_type)
actions.append(action)
return actions
async def build_condition(
full_config: ConfigType, template_arg: cg.TemplateArguments, args: TemplateArgsType
) -> MockObj:
async def build_condition(full_config, template_arg, args):
registry_entry, config = cg.extract_registry_entry_config(
CONDITION_REGISTRY, full_config
)
@@ -505,19 +411,15 @@ async def build_condition(
return await builder(config, action_id, template_arg, args)
async def build_condition_list(
config: ConfigType, templ: cg.TemplateArguments, args: TemplateArgsType
) -> list[MockObj]:
conditions: list[MockObj] = []
async def build_condition_list(config, templ, args):
conditions = []
for conf in config:
condition = await build_condition(conf, templ, args)
conditions.append(condition)
return conditions
async def build_automation(
trigger: MockObj, args: TemplateArgsType, config: ConfigType
) -> MockObj:
async def build_automation(trigger, args, config):
arg_types = [arg[0] for arg in args]
templ = cg.TemplateArguments(*arg_types)
obj = cg.new_Pvariable(config[CONF_AUTOMATION_ID], templ, trigger)

View File

@@ -1,3 +1,5 @@
import os
from esphome.const import __version__
from esphome.core import CORE
from esphome.helpers import mkdir_p, read_file, write_file_if_changed
@@ -61,7 +63,7 @@ def write_ini(content):
update_storage_json()
path = CORE.relative_build_path("platformio.ini")
if path.is_file():
if os.path.isfile(path):
text = read_file(path)
content_format = find_begin_end(
text, INI_AUTO_GENERATE_BEGIN, INI_AUTO_GENERATE_END

View File

@@ -12,7 +12,6 @@ from esphome.cpp_generator import ( # noqa: F401
ArrayInitializer,
Expression,
LineComment,
LogStringLiteral,
MockObj,
MockObjClass,
Pvariable,

View File

@@ -61,10 +61,11 @@ void AbsoluteHumidityComponent::loop() {
ESP_LOGW(TAG, "No valid state from temperature sensor!");
}
if (no_humidity) {
ESP_LOGW(TAG, "No valid state from humidity sensor!");
ESP_LOGW(TAG, "No valid state from temperature sensor!");
}
ESP_LOGW(TAG, "Unable to calculate absolute humidity.");
this->publish_state(NAN);
this->status_set_warning(LOG_STR("Unable to calculate absolute humidity."));
this->status_set_warning();
return;
}
@@ -86,8 +87,9 @@ void AbsoluteHumidityComponent::loop() {
es = es_wobus(temperature_c);
break;
default:
ESP_LOGE(TAG, "Invalid saturation vapor pressure equation selection!");
this->publish_state(NAN);
this->status_set_error("Invalid saturation vapor pressure equation selection!");
this->status_set_error();
return;
}
ESP_LOGD(TAG, "Saturation vapor pressure %f kPa", es);

View File

@@ -11,8 +11,15 @@ from esphome.components.esp32.const import (
VARIANT_ESP32S2,
VARIANT_ESP32S3,
)
from esphome.config_helpers import filter_source_files_from_platform
import esphome.config_validation as cv
from esphome.const import CONF_ANALOG, CONF_INPUT, CONF_NUMBER, PLATFORM_ESP8266
from esphome.const import (
CONF_ANALOG,
CONF_INPUT,
CONF_NUMBER,
PLATFORM_ESP8266,
PlatformFramework,
)
from esphome.core import CORE
CODEOWNERS = ["@esphome/core"]
@@ -266,3 +273,21 @@ def validate_adc_pin(value):
)(value)
raise NotImplementedError
FILTER_SOURCE_FILES = filter_source_files_from_platform(
{
"adc_sensor_esp32.cpp": {
PlatformFramework.ESP32_ARDUINO,
PlatformFramework.ESP32_IDF,
},
"adc_sensor_esp8266.cpp": {PlatformFramework.ESP8266_ARDUINO},
"adc_sensor_rp2040.cpp": {PlatformFramework.RP2040_ARDUINO},
"adc_sensor_libretiny.cpp": {
PlatformFramework.BK72XX_ARDUINO,
PlatformFramework.RTL87XX_ARDUINO,
PlatformFramework.LN882X_ARDUINO,
},
"adc_sensor_zephyr.cpp": {PlatformFramework.NRF52_ZEPHYR},
}
)

View File

@@ -241,8 +241,6 @@ float ADCSensor::sample_autorange_() {
cali_config.bitwidth = ADC_BITWIDTH_DEFAULT;
err = adc_cali_create_scheme_curve_fitting(&cali_config, &handle);
ESP_LOGVV(TAG, "Autorange atten=%d: Calibration handle creation %s (err=%d)", atten,
(err == ESP_OK) ? "SUCCESS" : "FAILED", err);
#else
adc_cali_line_fitting_config_t cali_config = {
.unit_id = this->adc_unit_,
@@ -253,14 +251,10 @@ float ADCSensor::sample_autorange_() {
#endif
};
err = adc_cali_create_scheme_line_fitting(&cali_config, &handle);
ESP_LOGVV(TAG, "Autorange atten=%d: Calibration handle creation %s (err=%d)", atten,
(err == ESP_OK) ? "SUCCESS" : "FAILED", err);
#endif
int raw;
err = adc_oneshot_read(this->adc_handle_, this->channel_, &raw);
ESP_LOGVV(TAG, "Autorange atten=%d: Raw ADC read %s, value=%d (err=%d)", atten,
(err == ESP_OK) ? "SUCCESS" : "FAILED", raw, err);
if (err != ESP_OK) {
ESP_LOGW(TAG, "ADC read failed in autorange with error %d", err);
@@ -281,10 +275,8 @@ float ADCSensor::sample_autorange_() {
err = adc_cali_raw_to_voltage(handle, raw, &voltage_mv);
if (err == ESP_OK) {
voltage = voltage_mv / 1000.0f;
ESP_LOGVV(TAG, "Autorange atten=%d: CALIBRATED - raw=%d -> %dmV -> %.6fV", atten, raw, voltage_mv, voltage);
} else {
voltage = raw * 3.3f / 4095.0f;
ESP_LOGVV(TAG, "Autorange atten=%d: UNCALIBRATED FALLBACK - raw=%d -> %.6fV (3.3V ref)", atten, raw, voltage);
}
// Clean up calibration handle
#if USE_ESP32_VARIANT_ESP32C3 || USE_ESP32_VARIANT_ESP32C5 || USE_ESP32_VARIANT_ESP32C6 || \
@@ -295,7 +287,6 @@ float ADCSensor::sample_autorange_() {
#endif
} else {
voltage = raw * 3.3f / 4095.0f;
ESP_LOGVV(TAG, "Autorange atten=%d: NO CALIBRATION - raw=%d -> %.6fV (3.3V ref)", atten, raw, voltage);
}
return {raw, voltage};
@@ -333,32 +324,18 @@ float ADCSensor::sample_autorange_() {
}
const int adc_half = 2048;
const uint32_t c12 = std::min(raw12, adc_half);
const int32_t c6_signed = adc_half - std::abs(raw6 - adc_half);
const uint32_t c6 = (c6_signed > 0) ? c6_signed : 0; // Clamp to prevent underflow
const int32_t c2_signed = adc_half - std::abs(raw2 - adc_half);
const uint32_t c2 = (c2_signed > 0) ? c2_signed : 0; // Clamp to prevent underflow
const uint32_t c0 = std::min(4095 - raw0, adc_half);
const uint32_t csum = c12 + c6 + c2 + c0;
ESP_LOGVV(TAG, "Autorange summary:");
ESP_LOGVV(TAG, " Raw readings: 12db=%d, 6db=%d, 2.5db=%d, 0db=%d", raw12, raw6, raw2, raw0);
ESP_LOGVV(TAG, " Voltages: 12db=%.6f, 6db=%.6f, 2.5db=%.6f, 0db=%.6f", mv12, mv6, mv2, mv0);
ESP_LOGVV(TAG, " Coefficients: c12=%u, c6=%u, c2=%u, c0=%u, sum=%u", c12, c6, c2, c0, csum);
uint32_t c12 = std::min(raw12, adc_half);
uint32_t c6 = adc_half - std::abs(raw6 - adc_half);
uint32_t c2 = adc_half - std::abs(raw2 - adc_half);
uint32_t c0 = std::min(4095 - raw0, adc_half);
uint32_t csum = c12 + c6 + c2 + c0;
if (csum == 0) {
ESP_LOGE(TAG, "Invalid weight sum in autorange calculation");
return NAN;
}
const float final_result = (mv12 * c12 + mv6 * c6 + mv2 * c2 + mv0 * c0) / csum;
ESP_LOGV(TAG, "Autorange final: (%.6f*%u + %.6f*%u + %.6f*%u + %.6f*%u)/%u = %.6fV", mv12, c12, mv6, c6, mv2, c2, mv0,
c0, csum, final_result);
return final_result;
return (mv12 * c12 + mv6 * c6 + mv2 * c2 + mv0 * c0) / csum;
}
} // namespace adc
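
The attenuation-blending formula above is terse, so here is a hedged numerical sketch of the same idea in Python: each attenuation's voltage is weighted by how far its raw code sits from the rails, so saturated or near-zero readings contribute little. The numbers are invented; only the formula mirrors the C++ (clamped-coefficient variant).

```python
# Illustrative only: blend ADC readings taken at 12 dB, 6 dB, 2.5 dB and 0 dB
# attenuation, weighting each by how far its raw code is from saturation.
ADC_HALF, ADC_MAX = 2048, 4095

def blend(raw12, mv12, raw6, mv6, raw2, mv2, raw0, mv0):
    c12 = min(raw12, ADC_HALF)                    # 12 dB is trusted at low codes
    c6 = max(ADC_HALF - abs(raw6 - ADC_HALF), 0)  # 6 dB / 2.5 dB are trusted mid-scale
    c2 = max(ADC_HALF - abs(raw2 - ADC_HALF), 0)  #   (clamped so they never go negative)
    c0 = min(ADC_MAX - raw0, ADC_HALF)            # 0 dB is trusted at high codes
    csum = c12 + c6 + c2 + c0
    if csum == 0:
        return float("nan")
    return (mv12 * c12 + mv6 * c6 + mv2 * c2 + mv0 * c0) / csum

# Invented readings for a ~1 V input: the 0 dB range is saturated (raw 4095)
# and therefore gets zero weight; the mid-scale 6 dB reading dominates.
print(blend(1150, 1.004, 2300, 1.001, 3270, 0.998, 4095, 0.950))
```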

View File

@@ -9,7 +9,6 @@ from esphome.components.zephyr import (
zephyr_add_prj_conf,
zephyr_add_user,
)
from esphome.config_helpers import filter_source_files_from_platform
import esphome.config_validation as cv
from esphome.const import (
CONF_ATTENUATION,
@@ -21,7 +20,6 @@ from esphome.const import (
PLATFORM_NRF52,
STATE_CLASS_MEASUREMENT,
UNIT_VOLT,
PlatformFramework,
)
from esphome.core import CORE
@@ -176,21 +174,3 @@ async def to_code(config):
}};
"""
)
FILTER_SOURCE_FILES = filter_source_files_from_platform(
{
"adc_sensor_esp32.cpp": {
PlatformFramework.ESP32_ARDUINO,
PlatformFramework.ESP32_IDF,
},
"adc_sensor_esp8266.cpp": {PlatformFramework.ESP8266_ARDUINO},
"adc_sensor_rp2040.cpp": {PlatformFramework.RP2040_ARDUINO},
"adc_sensor_libretiny.cpp": {
PlatformFramework.BK72XX_ARDUINO,
PlatformFramework.RTL87XX_ARDUINO,
PlatformFramework.LN882X_ARDUINO,
},
"adc_sensor_zephyr.cpp": {PlatformFramework.NRF52_ZEPHYR},
}
)

View File

@@ -113,7 +113,7 @@ void ADE7880::update() {
if (this->channel_a_ != nullptr) {
auto *chan = this->channel_a_;
this->update_sensor_from_s24zp_register16_(chan->current, AIRMS, [](float val) { return val / 100000.0f; });
this->update_sensor_from_s24zp_register16_(chan->voltage, AVRMS, [](float val) { return val / 10000.0f; });
this->update_sensor_from_s24zp_register16_(chan->voltage, BVRMS, [](float val) { return val / 10000.0f; });
this->update_sensor_from_s24zp_register16_(chan->active_power, AWATT, [](float val) { return val / 100.0f; });
this->update_sensor_from_s24zp_register16_(chan->apparent_power, AVA, [](float val) { return val / 100.0f; });
this->update_sensor_from_s16_register16_(chan->power_factor, APF,

View File

@@ -89,7 +89,7 @@ void AGS10Component::dump_config() {
bool AGS10Component::new_i2c_address(uint8_t newaddress) {
uint8_t rev_newaddress = ~newaddress;
std::array<uint8_t, 5> data{newaddress, rev_newaddress, newaddress, rev_newaddress, 0};
data[4] = crc8(data.data(), 4, 0xFF, 0x31, true);
data[4] = calc_crc8_(data, 4);
if (!this->write_bytes(REG_ADDRESS, data)) {
this->error_code_ = COMMUNICATION_FAILED;
this->status_set_warning();
@@ -109,7 +109,7 @@ bool AGS10Component::set_zero_point_with_current_resistance() { return this->set
bool AGS10Component::set_zero_point_with(uint16_t value) {
std::array<uint8_t, 5> data{0x00, 0x0C, (uint8_t) ((value >> 8) & 0xFF), (uint8_t) (value & 0xFF), 0};
data[4] = crc8(data.data(), 4, 0xFF, 0x31, true);
data[4] = calc_crc8_(data, 4);
if (!this->write_bytes(REG_CALIBRATION, data)) {
this->error_code_ = COMMUNICATION_FAILED;
this->status_set_warning();
@@ -184,7 +184,7 @@ template<size_t N> optional<std::array<uint8_t, N>> AGS10Component::read_and_che
auto res = *data;
auto crc_byte = res[len];
if (crc_byte != crc8(res.data(), len, 0xFF, 0x31, true)) {
if (crc_byte != calc_crc8_(res, len)) {
this->error_code_ = CRC_CHECK_FAILED;
ESP_LOGE(TAG, "Reading AGS10 version failed: crc error!");
return optional<std::array<uint8_t, N>>();
@@ -192,5 +192,20 @@ template<size_t N> optional<std::array<uint8_t, N>> AGS10Component::read_and_che
return data;
}
template<size_t N> uint8_t AGS10Component::calc_crc8_(std::array<uint8_t, N> dat, uint8_t num) {
uint8_t i, byte1, crc = 0xFF;
for (byte1 = 0; byte1 < num; byte1++) {
crc ^= (dat[byte1]);
for (i = 0; i < 8; i++) {
if (crc & 0x80) {
crc = (crc << 1) ^ 0x31;
} else {
crc = (crc << 1);
}
}
}
return crc;
}
} // namespace ags10
} // namespace esphome
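
For reference, the CRC-8 used here (initial value 0xFF, polynomial 0x31, MSB-first, no reflection or final XOR) can be cross-checked with a few lines of Python; this is a hedged verification helper, not part of the component.

```python
# Reference CRC-8 (init 0xFF, poly 0x31, MSB-first), mirroring calc_crc8_ above.
def crc8_0x31(data: bytes, init: int = 0xFF, poly: int = 0x31) -> int:
    crc = init
    for byte in data:
        crc ^= byte
        for _ in range(8):
            crc = ((crc << 1) ^ poly) & 0xFF if crc & 0x80 else (crc << 1) & 0xFF
    return crc

# Example: CRC over the first four bytes of a hypothetical new-address frame
# (address, ~address, address, ~address), as built in new_i2c_address() above.
frame = bytes([0x2A, 0xD5, 0x2A, 0xD5])
print(hex(crc8_0x31(frame)))
```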

View File

@@ -1,9 +1,9 @@
#pragma once
#include "esphome/components/i2c/i2c.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/core/automation.h"
#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/i2c/i2c.h"
namespace esphome {
namespace ags10 {
@@ -99,6 +99,16 @@ class AGS10Component : public PollingComponent, public i2c::I2CDevice {
* Read, checks and returns data from the sensor.
*/
template<size_t N> optional<std::array<uint8_t, N>> read_and_check_(uint8_t a_register);
/**
* Calculates CRC8 value.
*
* CRC8 calculation, initial value: 0xFF, polynomial: 0x31 (x8+ x5+ x4+1)
*
* @param[in] dat the data buffer
* @param num number of bytes in the buffer
*/
template<size_t N> uint8_t calc_crc8_(std::array<uint8_t, N> dat, uint8_t num);
};
template<typename... Ts> class AGS10NewI2cAddressAction : public Action<Ts...>, public Parented<AGS10Component> {

View File

@@ -96,7 +96,7 @@ void AHT10Component::read_data_() {
ESP_LOGD(TAG, "Read attempt %d at %ums", this->read_count_, (unsigned) (millis() - this->start_time_));
}
if (this->read(data, 6) != i2c::ERROR_OK) {
this->status_set_warning(LOG_STR("Read failed, will retry"));
this->status_set_warning("Read failed, will retry");
this->restart_read_();
return;
}
@@ -113,7 +113,7 @@ void AHT10Component::read_data_() {
} else {
ESP_LOGD(TAG, "Invalid humidity, retrying");
if (this->write(AHT10_MEASURE_CMD, sizeof(AHT10_MEASURE_CMD)) != i2c::ERROR_OK) {
this->status_set_warning(LOG_STR(ESP_LOG_MSG_COMM_FAIL));
this->status_set_warning(ESP_LOG_MSG_COMM_FAIL);
}
this->restart_read_();
return;
@@ -144,7 +144,7 @@ void AHT10Component::update() {
return;
this->start_time_ = millis();
if (this->write(AHT10_MEASURE_CMD, sizeof(AHT10_MEASURE_CMD)) != i2c::ERROR_OK) {
this->status_set_warning(LOG_STR(ESP_LOG_MSG_COMM_FAIL));
this->status_set_warning(ESP_LOG_MSG_COMM_FAIL);
return;
}
this->restart_read_();

View File

@@ -18,6 +18,6 @@ CONFIG_SCHEMA = cv.Schema(
).extend(esp32_ble_tracker.ESP_BLE_DEVICE_SCHEMA)
async def to_code(config):
def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await esp32_ble_tracker.register_ble_device(var, config)
yield esp32_ble_tracker.register_ble_device(var, config)

View File

@@ -13,7 +13,7 @@ from esphome.const import (
CONF_TRIGGER_ID,
CONF_WEB_SERVER,
)
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.core import CORE, coroutine_with_priority
from esphome.core.entity_helpers import entity_duplicate_validator, setup_entity
from esphome.cpp_generator import MockObjClass
@@ -345,6 +345,6 @@ async def alarm_control_panel_is_armed_to_code(
return cg.new_Pvariable(condition_id, template_arg, paren)
@coroutine_with_priority(CoroPriority.CORE)
@coroutine_with_priority(100.0)
async def to_code(config):
cg.add_global(alarm_control_panel_ns.using)

View File

@@ -29,6 +29,22 @@ namespace am2315c {
static const char *const TAG = "am2315c";
uint8_t AM2315C::crc8_(uint8_t *data, uint8_t len) {
uint8_t crc = 0xFF;
while (len--) {
crc ^= *data++;
for (uint8_t i = 0; i < 8; i++) {
if (crc & 0x80) {
crc <<= 1;
crc ^= 0x31;
} else {
crc <<= 1;
}
}
}
return crc;
}
bool AM2315C::reset_register_(uint8_t reg) {
// code based on demo code sent by www.aosong.com
// no further documentation.
@@ -70,7 +86,7 @@ bool AM2315C::convert_(uint8_t *data, float &humidity, float &temperature) {
humidity = raw * 9.5367431640625e-5;
raw = ((data[3] & 0x0F) << 16) | (data[4] << 8) | data[5];
temperature = raw * 1.9073486328125e-4 - 50;
return crc8(data, 6, 0xFF, 0x31, true) == data[6];
return this->crc8_(data, 6) == data[6];
}
void AM2315C::setup() {

View File

@@ -21,9 +21,9 @@
// SOFTWARE.
#pragma once
#include "esphome/components/i2c/i2c.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/i2c/i2c.h"
namespace esphome {
namespace am2315c {
@@ -39,6 +39,7 @@ class AM2315C : public PollingComponent, public i2c::I2CDevice {
void set_humidity_sensor(sensor::Sensor *humidity_sensor) { this->humidity_sensor_ = humidity_sensor; }
protected:
uint8_t crc8_(uint8_t *data, uint8_t len);
bool convert_(uint8_t *data, float &humidity, float &temperature);
bool reset_register_(uint8_t reg);

View File

@@ -26,12 +26,12 @@ uint32_t Animation::get_animation_frame_count() const { return this->animation_f
int Animation::get_current_frame() const { return this->current_frame_; }
void Animation::next_frame() {
this->current_frame_++;
if (loop_count_ && static_cast<uint32_t>(this->current_frame_) == loop_end_frame_ &&
if (loop_count_ && this->current_frame_ == loop_end_frame_ &&
(this->loop_current_iteration_ < loop_count_ || loop_count_ < 0)) {
this->current_frame_ = loop_start_frame_;
this->loop_current_iteration_++;
}
if (static_cast<uint32_t>(this->current_frame_) >= animation_frame_count_) {
if (this->current_frame_ >= animation_frame_count_) {
this->loop_current_iteration_ = 1;
this->current_frame_ = 0;
}

View File

@@ -1,5 +1,4 @@
import base64
import logging
from esphome import automation
from esphome.automation import Condition
@@ -9,59 +8,34 @@ import esphome.config_validation as cv
from esphome.const import (
CONF_ACTION,
CONF_ACTIONS,
CONF_CAPTURE_RESPONSE,
CONF_DATA,
CONF_DATA_TEMPLATE,
CONF_EVENT,
CONF_ID,
CONF_KEY,
CONF_MAX_CONNECTIONS,
CONF_ON_CLIENT_CONNECTED,
CONF_ON_CLIENT_DISCONNECTED,
CONF_ON_ERROR,
CONF_ON_SUCCESS,
CONF_PASSWORD,
CONF_PORT,
CONF_REBOOT_TIMEOUT,
CONF_RESPONSE_TEMPLATE,
CONF_SERVICE,
CONF_SERVICES,
CONF_TAG,
CONF_TRIGGER_ID,
CONF_VARIABLES,
)
from esphome.core import CORE, ID, CoroPriority, coroutine_with_priority
from esphome.cpp_generator import TemplateArgsType
from esphome.types import ConfigType
_LOGGER = logging.getLogger(__name__)
from esphome.core import CORE, coroutine_with_priority
DOMAIN = "api"
DEPENDENCIES = ["network"]
AUTO_LOAD = ["socket"]
CODEOWNERS = ["@esphome/core"]
def AUTO_LOAD(config: ConfigType) -> list[str]:
"""Conditionally auto-load json only when capture_response is used."""
base = ["socket"]
# Check if any homeassistant.action/homeassistant.service has capture_response: true
# This flag is set during config validation in _validate_response_config
if not config or CORE.data.get(DOMAIN, {}).get(CONF_CAPTURE_RESPONSE, False):
return base + ["json"]
return base
api_ns = cg.esphome_ns.namespace("api")
APIServer = api_ns.class_("APIServer", cg.Component, cg.Controller)
HomeAssistantServiceCallAction = api_ns.class_(
"HomeAssistantServiceCallAction", automation.Action
)
ActionResponse = api_ns.class_("ActionResponse")
HomeAssistantActionResponseTrigger = api_ns.class_(
"HomeAssistantActionResponseTrigger", automation.Trigger
)
APIConnectedCondition = api_ns.class_("APIConnectedCondition", Condition)
UserServiceTrigger = api_ns.class_("UserServiceTrigger", automation.Trigger)
@@ -81,8 +55,6 @@ CONF_BATCH_DELAY = "batch_delay"
CONF_CUSTOM_SERVICES = "custom_services"
CONF_HOMEASSISTANT_SERVICES = "homeassistant_services"
CONF_HOMEASSISTANT_STATES = "homeassistant_states"
CONF_LISTEN_BACKLOG = "listen_backlog"
CONF_MAX_SEND_QUEUE = "max_send_queue"
def validate_encryption_key(value):
@@ -129,32 +101,6 @@ def _encryption_schema(config):
return ENCRYPTION_SCHEMA(config)
def _validate_api_config(config: ConfigType) -> ConfigType:
"""Validate API configuration with mutual exclusivity check and deprecation warning."""
# Check if both password and encryption are configured
has_password = CONF_PASSWORD in config and config[CONF_PASSWORD]
has_encryption = CONF_ENCRYPTION in config
if has_password and has_encryption:
raise cv.Invalid(
"The 'password' and 'encryption' options are mutually exclusive. "
"The API client only supports one authentication method at a time. "
"Please remove one of them. "
"Note: 'password' authentication is deprecated and will be removed in version 2026.1.0. "
"We strongly recommend using 'encryption' instead for better security."
)
# Warn about password deprecation
if has_password:
_LOGGER.warning(
"API 'password' authentication has been deprecated since May 2022 and will be removed in version 2026.1.0. "
"Please migrate to the 'encryption' configuration. "
"See https://esphome.io/components/api.html#configuration-variables"
)
return config
CONFIG_SCHEMA = cv.All(
cv.Schema(
{
@@ -182,50 +128,13 @@ CONFIG_SCHEMA = cv.All(
cv.Optional(CONF_ON_CLIENT_DISCONNECTED): automation.validate_automation(
single=True
),
# Connection limits to prevent memory exhaustion on resource-constrained devices
# Each connection uses ~500-1000 bytes of RAM plus system resources
# Platform defaults based on available RAM and network stack implementation:
cv.SplitDefault(
CONF_LISTEN_BACKLOG,
esp8266=1, # Limited RAM (~40KB free), LWIP raw sockets
esp32=4, # More RAM (520KB), BSD sockets
rp2040=1, # Limited RAM (264KB), LWIP raw sockets like ESP8266
bk72xx=4, # Moderate RAM, BSD-style sockets
rtl87xx=4, # Moderate RAM, BSD-style sockets
host=4, # Abundant resources
ln882x=4, # Moderate RAM
): cv.int_range(min=1, max=10),
cv.SplitDefault(
CONF_MAX_CONNECTIONS,
esp8266=4, # ~40KB free RAM, each connection uses ~500-1000 bytes
esp32=8, # 520KB RAM available
rp2040=4, # 264KB RAM but LWIP constraints
bk72xx=8, # Moderate RAM
rtl87xx=8, # Moderate RAM
host=8, # Abundant resources
ln882x=8, # Moderate RAM
): cv.int_range(min=1, max=20),
# Maximum queued send buffers per connection before dropping connection
# Each buffer uses ~8-12 bytes overhead plus actual message size
# Platform defaults based on available RAM and typical message rates:
cv.SplitDefault(
CONF_MAX_SEND_QUEUE,
esp8266=5, # Limited RAM, need to fail fast
esp32=8, # More RAM, can buffer more
rp2040=5, # Limited RAM
bk72xx=8, # Moderate RAM
rtl87xx=8, # Moderate RAM
host=16, # Abundant resources
ln882x=8, # Moderate RAM
): cv.int_range(min=1, max=64),
}
).extend(cv.COMPONENT_SCHEMA),
cv.rename_key(CONF_SERVICES, CONF_ACTIONS),
_validate_api_config,
)
@coroutine_with_priority(CoroPriority.WEB)
@coroutine_with_priority(40.0)
async def to_code(config):
var = cg.new_Pvariable(config[CONF_ID])
await cg.register_component(var, config)
@@ -236,11 +145,6 @@ async def to_code(config):
cg.add(var.set_password(config[CONF_PASSWORD]))
cg.add(var.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
cg.add(var.set_batch_delay(config[CONF_BATCH_DELAY]))
if CONF_LISTEN_BACKLOG in config:
cg.add(var.set_listen_backlog(config[CONF_LISTEN_BACKLOG]))
if CONF_MAX_CONNECTIONS in config:
cg.add(var.set_max_connections(config[CONF_MAX_CONNECTIONS]))
cg.add_define("API_MAX_SEND_QUEUE", config[CONF_MAX_SEND_QUEUE])
# Set USE_API_SERVICES if any services are enabled
if config.get(CONF_ACTIONS) or config[CONF_CUSTOM_SERVICES]:
@@ -289,7 +193,6 @@ async def to_code(config):
if key := encryption_config.get(CONF_KEY):
decoded = base64.b64decode(key)
cg.add(var.set_noise_psk(list(decoded)))
cg.add_define("USE_API_NOISE_PSK_FROM_YAML")
else:
# No key provided, but encryption desired
# This will allow a plaintext client to provide a noise key,
@@ -309,29 +212,6 @@ async def to_code(config):
KEY_VALUE_SCHEMA = cv.Schema({cv.string: cv.templatable(cv.string_strict)})
def _validate_response_config(config: ConfigType) -> ConfigType:
# Validate dependencies:
# - response_template requires capture_response: true
# - capture_response: true requires on_success
if CONF_RESPONSE_TEMPLATE in config and not config[CONF_CAPTURE_RESPONSE]:
raise cv.Invalid(
f"`{CONF_RESPONSE_TEMPLATE}` requires `{CONF_CAPTURE_RESPONSE}: true` to be set.",
path=[CONF_RESPONSE_TEMPLATE],
)
if config[CONF_CAPTURE_RESPONSE] and CONF_ON_SUCCESS not in config:
raise cv.Invalid(
f"`{CONF_CAPTURE_RESPONSE}: true` requires `{CONF_ON_SUCCESS}` to be set.",
path=[CONF_CAPTURE_RESPONSE],
)
# Track if any action uses capture_response for AUTO_LOAD
if config[CONF_CAPTURE_RESPONSE]:
CORE.data.setdefault(DOMAIN, {})[CONF_CAPTURE_RESPONSE] = True
return config
HOMEASSISTANT_ACTION_ACTION_SCHEMA = cv.All(
cv.Schema(
{
@@ -347,15 +227,10 @@ HOMEASSISTANT_ACTION_ACTION_SCHEMA = cv.All(
cv.Optional(CONF_VARIABLES, default={}): cv.Schema(
{cv.string: cv.returning_lambda}
),
cv.Optional(CONF_RESPONSE_TEMPLATE): cv.templatable(cv.string),
cv.Optional(CONF_CAPTURE_RESPONSE, default=False): cv.boolean,
cv.Optional(CONF_ON_SUCCESS): automation.validate_automation(single=True),
cv.Optional(CONF_ON_ERROR): automation.validate_automation(single=True),
}
),
cv.has_exactly_one_key(CONF_SERVICE, CONF_ACTION),
cv.rename_key(CONF_SERVICE, CONF_ACTION),
_validate_response_config,
)
@@ -369,67 +244,21 @@ HOMEASSISTANT_ACTION_ACTION_SCHEMA = cv.All(
HomeAssistantServiceCallAction,
HOMEASSISTANT_ACTION_ACTION_SCHEMA,
)
async def homeassistant_service_to_code(
config: ConfigType,
action_id: ID,
template_arg: cg.TemplateArguments,
args: TemplateArgsType,
):
async def homeassistant_service_to_code(config, action_id, template_arg, args):
cg.add_define("USE_API_HOMEASSISTANT_SERVICES")
serv = await cg.get_variable(config[CONF_ID])
var = cg.new_Pvariable(action_id, template_arg, serv, False)
templ = await cg.templatable(config[CONF_ACTION], args, None)
cg.add(var.set_service(templ))
# Initialize FixedVectors with exact sizes from config
cg.add(var.init_data(len(config[CONF_DATA])))
for key, value in config[CONF_DATA].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_data(key, templ))
cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE])))
for key, value in config[CONF_DATA_TEMPLATE].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_data_template(key, templ))
cg.add(var.init_variables(len(config[CONF_VARIABLES])))
for key, value in config[CONF_VARIABLES].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_variable(key, templ))
if on_error := config.get(CONF_ON_ERROR):
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES")
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES_ERRORS")
cg.add(var.set_wants_status())
await automation.build_automation(
var.get_error_trigger(),
[(cg.std_string, "error"), *args],
on_error,
)
if on_success := config.get(CONF_ON_SUCCESS):
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES")
cg.add(var.set_wants_status())
if config[CONF_CAPTURE_RESPONSE]:
cg.add(var.set_wants_response())
cg.add_define("USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON")
await automation.build_automation(
var.get_success_trigger_with_response(),
[(cg.JsonObjectConst, "response"), *args],
on_success,
)
if response_template := config.get(CONF_RESPONSE_TEMPLATE):
templ = await cg.templatable(response_template, args, cg.std_string)
cg.add(var.set_response_template(templ))
else:
await automation.build_automation(
var.get_success_trigger(),
args,
on_success,
)
return var
@@ -465,23 +294,15 @@ async def homeassistant_event_to_code(config, action_id, template_arg, args):
var = cg.new_Pvariable(action_id, template_arg, serv, True)
templ = await cg.templatable(config[CONF_EVENT], args, None)
cg.add(var.set_service(templ))
# Initialize FixedVectors with exact sizes from config
cg.add(var.init_data(len(config[CONF_DATA])))
for key, value in config[CONF_DATA].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_data(key, templ))
cg.add(var.init_data_template(len(config[CONF_DATA_TEMPLATE])))
for key, value in config[CONF_DATA_TEMPLATE].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_data_template(key, templ))
cg.add(var.init_variables(len(config[CONF_VARIABLES])))
for key, value in config[CONF_VARIABLES].items():
templ = await cg.templatable(value, args, None)
cg.add(var.add_variable(key, templ))
return var
@@ -500,12 +321,9 @@ HOMEASSISTANT_TAG_SCANNED_ACTION_SCHEMA = cv.maybe_simple_value(
HOMEASSISTANT_TAG_SCANNED_ACTION_SCHEMA,
)
async def homeassistant_tag_scanned_to_code(config, action_id, template_arg, args):
cg.add_define("USE_API_HOMEASSISTANT_SERVICES")
serv = await cg.get_variable(config[CONF_ID])
var = cg.new_Pvariable(action_id, template_arg, serv, True)
cg.add(var.set_service("esphome.tag_scanned"))
# Initialize FixedVector with exact size (1 data field)
cg.add(var.init_data(1))
templ = await cg.templatable(config[CONF_TAG], args, cg.std_string)
cg.add(var.add_data("tag_id", templ))
return var
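The `init_data(...)` / `add_data(...)` calls generated above size a fixed-capacity container once, using the number of entries in the YAML config, and only then fill it. A minimal sketch of that idea; `FixedVector`, `init`, and `push_back` are illustrative names here, not ESPHome's actual types:

```cpp
#include <cassert>
#include <cstddef>
#include <memory>
#include <utility>

// Illustrative fixed-capacity vector: one allocation, no reallocation on growth.
template<typename T> class FixedVector {
 public:
  void init(size_t capacity) {
    data_ = std::make_unique<T[]>(capacity);
    capacity_ = capacity;
    size_ = 0;
  }
  void push_back(T value) {
    assert(size_ < capacity_);  // the capacity is known exactly from the config
    data_[size_++] = std::move(value);
  }
  size_t size() const { return size_; }
  T &operator[](size_t i) { return data_[i]; }

 private:
  std::unique_ptr<T[]> data_;
  size_t capacity_{0};
  size_t size_{0};
};

int main() {
  FixedVector<int> data;
  data.init(3);       // mirrors var.init_data(len(config[CONF_DATA]))
  data.push_back(1);  // mirrors one var.add_data(key, templ) per config entry
  data.push_back(2);
  data.push_back(3);
  return data.size() == 3 ? 0 : 1;
}
```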

View File

@@ -7,7 +7,7 @@ service APIConnection {
option (needs_setup_connection) = false;
option (needs_authentication) = false;
}
rpc authenticate (AuthenticationRequest) returns (AuthenticationResponse) {
rpc connect (ConnectRequest) returns (ConnectResponse) {
option (needs_setup_connection) = false;
option (needs_authentication) = false;
}
@@ -27,6 +27,9 @@ service APIConnection {
rpc subscribe_logs (SubscribeLogsRequest) returns (void) {}
rpc subscribe_homeassistant_services (SubscribeHomeassistantServicesRequest) returns (void) {}
rpc subscribe_home_assistant_states (SubscribeHomeAssistantStatesRequest) returns (void) {}
rpc get_time (GetTimeRequest) returns (GetTimeResponse) {
option (needs_authentication) = false;
}
rpc execute_service (ExecuteServiceRequest) returns (void) {}
rpc noise_encryption_set_key (NoiseEncryptionSetKeyRequest) returns (NoiseEncryptionSetKeyResponse) {}
@@ -66,9 +69,6 @@ service APIConnection {
rpc voice_assistant_set_configuration(VoiceAssistantSetConfiguration) returns (void) {}
rpc alarm_control_panel_command (AlarmControlPanelCommandRequest) returns (void) {}
rpc zwave_proxy_frame(ZWaveProxyFrame) returns (void) {}
rpc zwave_proxy_request(ZWaveProxyRequest) returns (void) {}
}
@@ -102,7 +102,7 @@ message HelloRequest {
// For example "Home Assistant"
// Not strictly necessary to send but nice for debugging
// purposes.
string client_info = 1 [(pointer_to_buffer) = true];
string client_info = 1;
uint32 api_version_major = 2;
uint32 api_version_minor = 3;
}
@@ -132,23 +132,21 @@ message HelloResponse {
// Message sent at the beginning of each connection to authenticate the client
// Can only be sent by the client and only at the beginning of the connection
message AuthenticationRequest {
message ConnectRequest {
option (id) = 3;
option (source) = SOURCE_CLIENT;
option (no_delay) = true;
option (ifdef) = "USE_API_PASSWORD";
// The password to log in with
string password = 1 [(pointer_to_buffer) = true];
string password = 1;
}
// Confirmation of successful connection. After this the connection is available for all traffic.
// Can only be sent by the server and only at the beginning of the connection
message AuthenticationResponse {
message ConnectResponse {
option (id) = 4;
option (source) = SOURCE_SERVER;
option (no_delay) = true;
option (ifdef) = "USE_API_PASSWORD";
bool invalid_password = 1;
}
@@ -257,10 +255,6 @@ message DeviceInfoResponse {
// Top-level area info to phase out suggested_area
AreaInfo area = 22 [(field_ifdef) = "USE_AREAS"];
// Indicates if Z-Wave proxy support is available and features supported
uint32 zwave_proxy_feature_flags = 23 [(field_ifdef) = "USE_ZWAVE_PROXY"];
uint32 zwave_home_id = 24 [(field_ifdef) = "USE_ZWAVE_PROXY"];
}
message ListEntitiesRequest {
@@ -769,33 +763,17 @@ message HomeassistantServiceMap {
string value = 2 [(no_zero_copy) = true];
}
message HomeassistantActionRequest {
message HomeassistantServiceResponse {
option (id) = 35;
option (source) = SOURCE_SERVER;
option (no_delay) = true;
option (ifdef) = "USE_API_HOMEASSISTANT_SERVICES";
string service = 1;
repeated HomeassistantServiceMap data = 2 [(fixed_vector) = true];
repeated HomeassistantServiceMap data_template = 3 [(fixed_vector) = true];
repeated HomeassistantServiceMap variables = 4 [(fixed_vector) = true];
repeated HomeassistantServiceMap data = 2;
repeated HomeassistantServiceMap data_template = 3;
repeated HomeassistantServiceMap variables = 4;
bool is_event = 5;
uint32 call_id = 6 [(field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES"];
bool wants_response = 7 [(field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON"];
string response_template = 8 [(no_zero_copy) = true, (field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON"];
}
// Message sent by Home Assistant to ESPHome with service call response data
message HomeassistantActionResponse {
option (id) = 130;
option (source) = SOURCE_CLIENT;
option (no_delay) = true;
option (ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES";
uint32 call_id = 1; // Matches the call_id from HomeassistantActionRequest
bool success = 2; // Whether the service call succeeded
string error_message = 3; // Error message if success = false
bytes response_data = 4 [(pointer_to_buffer) = true, (field_ifdef) = "USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON"];
}
// ==================== IMPORT HOME ASSISTANT STATES ====================
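The paired `call_id` fields above imply a simple request/response correlation: an id is assigned when `HomeassistantActionRequest` goes out and matched when `HomeassistantActionResponse` comes back. A hypothetical sketch of that bookkeeping; the map, callback type, and function names are invented for illustration and are not ESPHome's API:

```cpp
#include <cstdint>
#include <functional>
#include <string>
#include <unordered_map>
#include <utility>

// Callback invoked when a matching response arrives.
using ActionResponseCallback = std::function<void(bool success, const std::string &error_message)>;

class ActionCallTracker {
 public:
  // Register a pending call and return the id to place into the outgoing request.
  uint32_t register_call(ActionResponseCallback cb) {
    uint32_t id = next_call_id_++;
    pending_[id] = std::move(cb);
    return id;
  }

  // Match an incoming response to its pending call and fire the callback exactly once.
  void handle_response(uint32_t call_id, bool success, const std::string &error_message) {
    auto it = pending_.find(call_id);
    if (it == pending_.end())
      return;  // unknown or already-handled id
    auto cb = std::move(it->second);
    pending_.erase(it);
    cb(success, error_message);
  }

 private:
  uint32_t next_call_id_{1};
  std::unordered_map<uint32_t, ActionResponseCallback> pending_;
};
```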
@@ -831,16 +809,15 @@ message HomeAssistantStateResponse {
// ==================== IMPORT TIME ====================
message GetTimeRequest {
option (id) = 36;
option (source) = SOURCE_SERVER;
option (source) = SOURCE_BOTH;
}
message GetTimeResponse {
option (id) = 37;
option (source) = SOURCE_CLIENT;
option (source) = SOURCE_BOTH;
option (no_delay) = true;
fixed32 epoch_seconds = 1;
string timezone = 2 [(pointer_to_buffer) = true];
}
// ==================== USER-DEFINED SERVICES ====================
@@ -866,7 +843,7 @@ message ListEntitiesServicesResponse {
string name = 1;
fixed32 key = 2;
repeated ListEntitiesServicesArgument args = 3 [(fixed_vector) = true];
repeated ListEntitiesServicesArgument args = 3;
}
message ExecuteServiceArgument {
option (ifdef) = "USE_API_SERVICES";
@@ -876,10 +853,10 @@ message ExecuteServiceArgument {
string string_ = 4;
// ESPHome 1.14 (api v1.3) make int a signed value
sint32 int_ = 5;
repeated bool bool_array = 6 [packed=false, (fixed_vector) = true];
repeated sint32 int_array = 7 [packed=false, (fixed_vector) = true];
repeated float float_array = 8 [packed=false, (fixed_vector) = true];
repeated string string_array = 9 [(fixed_vector) = true];
repeated bool bool_array = 6 [packed=false];
repeated sint32 int_array = 7 [packed=false];
repeated float float_array = 8 [packed=false];
repeated string string_array = 9;
}
message ExecuteServiceRequest {
option (id) = 42;
@@ -888,7 +865,7 @@ message ExecuteServiceRequest {
option (ifdef) = "USE_API_SERVICES";
fixed32 key = 1;
repeated ExecuteServiceArgument args = 2 [(fixed_vector) = true];
repeated ExecuteServiceArgument args = 2;
}
// ==================== CAMERA ====================
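For background on the wire format of the scalar fields above: protobuf `uint32`/`sint32` values are varint-encoded (7 payload bits per byte, high bit as a continuation flag), and `sint32` additionally uses ZigZag so small negative numbers stay short. A minimal standalone encoder, purely for illustration and not the project's actual implementation:

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Encode an unsigned value as a protobuf varint; returns the number of bytes written.
size_t encode_varint(uint64_t value, uint8_t *out) {
  size_t n = 0;
  while (value >= 0x80) {
    out[n++] = static_cast<uint8_t>(value) | 0x80;  // lower 7 bits plus continuation bit
    value >>= 7;
  }
  out[n++] = static_cast<uint8_t>(value);  // final byte, continuation bit clear
  return n;
}

// ZigZag-map a signed 32-bit value so small magnitudes encode to short varints.
uint32_t zigzag32(int32_t v) {
  return (static_cast<uint32_t>(v) << 1) ^ static_cast<uint32_t>(v >> 31);
}

int main() {
  uint8_t buf[10];
  size_t n = encode_varint(zigzag32(-2), buf);  // -2 -> zigzag 3 -> single byte 0x03
  printf("bytes=%zu first=0x%02X\n", n, buf[0]);
  return 0;
}
```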
@@ -987,8 +964,8 @@ message ListEntitiesClimateResponse {
string name = 3;
reserved 4; // Deprecated: was string unique_id
bool supports_current_temperature = 5; // Deprecated: use feature_flags
bool supports_two_point_target_temperature = 6; // Deprecated: use feature_flags
bool supports_current_temperature = 5;
bool supports_two_point_target_temperature = 6;
repeated ClimateMode supported_modes = 7 [(container_pointer) = "std::set<climate::ClimateMode>"];
float visual_min_temperature = 8;
float visual_max_temperature = 9;
@@ -997,7 +974,7 @@ message ListEntitiesClimateResponse {
// is if CLIMATE_PRESET_AWAY exists in supported_presets
// Deprecated in API version 1.5
bool legacy_supports_away = 11 [deprecated=true];
bool supports_action = 12; // Deprecated: use feature_flags
bool supports_action = 12;
repeated ClimateFanMode supported_fan_modes = 13 [(container_pointer) = "std::set<climate::ClimateFanMode>"];
repeated ClimateSwingMode supported_swing_modes = 14 [(container_pointer) = "std::set<climate::ClimateSwingMode>"];
repeated string supported_custom_fan_modes = 15 [(container_pointer) = "std::set"];
@@ -1007,12 +984,11 @@ message ListEntitiesClimateResponse {
string icon = 19 [(field_ifdef) = "USE_ENTITY_ICON"];
EntityCategory entity_category = 20;
float visual_current_temperature_step = 21;
bool supports_current_humidity = 22; // Deprecated: use feature_flags
bool supports_target_humidity = 23; // Deprecated: use feature_flags
bool supports_current_humidity = 22;
bool supports_target_humidity = 23;
float visual_min_humidity = 24;
float visual_max_humidity = 25;
uint32 device_id = 26 [(field_ifdef) = "USE_DEVICES"];
uint32 feature_flags = 27;
}
message ClimateStateResponse {
option (id) = 47;
@@ -1482,7 +1458,7 @@ message BluetoothDeviceRequest {
uint64 address = 1;
BluetoothDeviceRequestType request_type = 2;
bool has_address_type = 3; // Deprecated, should be removed in 2027.8 - https://github.com/esphome/esphome/pull/10318
bool has_address_type = 3;
uint32 address_type = 4;
}
@@ -1520,7 +1496,7 @@ message BluetoothGATTCharacteristic {
repeated uint64 uuid = 1 [(fixed_array_size) = 2, (fixed_array_skip_zero) = true];
uint32 handle = 2;
uint32 properties = 3;
repeated BluetoothGATTDescriptor descriptors = 4 [(fixed_vector) = true];
repeated BluetoothGATTDescriptor descriptors = 4;
// New field for efficient UUID (v1.12+)
// Only one of uuid or short_uuid will be set.
@@ -1532,7 +1508,7 @@ message BluetoothGATTCharacteristic {
message BluetoothGATTService {
repeated uint64 uuid = 1 [(fixed_array_size) = 2, (fixed_array_skip_zero) = true];
uint32 handle = 2;
repeated BluetoothGATTCharacteristic characteristics = 3 [(fixed_vector) = true];
repeated BluetoothGATTCharacteristic characteristics = 3;
// New field for efficient UUID (v1.12+)
// Only one of uuid or short_uuid will be set.
@@ -1588,7 +1564,7 @@ message BluetoothGATTWriteRequest {
uint32 handle = 2;
bool response = 3;
bytes data = 4 [(pointer_to_buffer) = true];
bytes data = 4;
}
message BluetoothGATTReadDescriptorRequest {
@@ -1608,7 +1584,7 @@ message BluetoothGATTWriteDescriptorRequest {
uint64 address = 1;
uint32 handle = 2;
bytes data = 3 [(pointer_to_buffer) = true];
bytes data = 3;
}
message BluetoothGATTNotifyRequest {
@@ -1736,7 +1712,6 @@ message BluetoothScannerStateResponse {
BluetoothScannerState state = 1;
BluetoothScannerMode mode = 2;
BluetoothScannerMode configured_mode = 3;
}
message BluetoothScannerSetModeRequest {
@@ -1882,22 +1857,10 @@ message VoiceAssistantWakeWord {
repeated string trained_languages = 3;
}
message VoiceAssistantExternalWakeWord {
string id = 1;
string wake_word = 2;
repeated string trained_languages = 3;
string model_type = 4;
uint32 model_size = 5;
string model_hash = 6;
string url = 7;
}
message VoiceAssistantConfigurationRequest {
option (id) = 121;
option (source) = SOURCE_CLIENT;
option (ifdef) = "USE_VOICE_ASSISTANT";
repeated VoiceAssistantExternalWakeWord external_wake_words = 1;
}
message VoiceAssistantConfigurationResponse {
@@ -2312,28 +2275,3 @@ message UpdateCommandRequest {
UpdateCommand command = 2;
uint32 device_id = 3 [(field_ifdef) = "USE_DEVICES"];
}
// ==================== Z-WAVE ====================
message ZWaveProxyFrame {
option (id) = 128;
option (source) = SOURCE_BOTH;
option (ifdef) = "USE_ZWAVE_PROXY";
option (no_delay) = true;
bytes data = 1 [(pointer_to_buffer) = true];
}
enum ZWaveProxyRequestType {
ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE = 0;
ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE = 1;
ZWAVE_PROXY_REQUEST_TYPE_HOME_ID_CHANGE = 2;
}
message ZWaveProxyRequest {
option (id) = 129;
option (source) = SOURCE_BOTH;
option (ifdef) = "USE_ZWAVE_PROXY";
ZWaveProxyRequestType type = 1;
bytes data = 2 [(pointer_to_buffer) = true];
}
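The `(pointer_to_buffer) = true` option on the `bytes`/`string` fields above appears to make the decoder expose a raw pointer into the receive buffer plus a length (the connection code below consumes `msg.data` / `msg.data_len`), rather than copying into a `std::string`. A rough sketch of why that shape avoids an allocation; struct and field names here are illustrative only:

```cpp
#include <cstddef>
#include <cstdint>
#include <string>

// Copying view: every decoded frame allocates and copies the payload.
struct CopyingFrame {
  std::string data;
};

// Zero-copy view: the decoded message just points into the caller's receive buffer.
// The pointer is only valid while that buffer is alive and unmodified.
struct ZeroCopyFrame {
  const uint8_t *data{nullptr};
  size_t data_len{0};
};

void decode_zero_copy(const uint8_t *rx_buf, size_t len, ZeroCopyFrame &out) {
  out.data = rx_buf;   // no allocation, no memcpy
  out.data_len = len;  // caller must consume it before reusing rx_buf
}

void decode_copying(const uint8_t *rx_buf, size_t len, CopyingFrame &out) {
  out.data.assign(reinterpret_cast<const char *>(rx_buf), len);  // heap copy
}

int main() {
  const uint8_t rx_buf[4] = {1, 2, 3, 4};
  ZeroCopyFrame zc;
  decode_zero_copy(rx_buf, sizeof(rx_buf), zc);
  CopyingFrame cp;
  decode_copying(rx_buf, sizeof(rx_buf), cp);
  return zc.data_len == cp.data.size() ? 0 : 1;
}
```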

View File

@@ -8,9 +8,9 @@
#endif
#include <cerrno>
#include <cinttypes>
#include <utility>
#include <functional>
#include <limits>
#include <utility>
#include "esphome/components/network/util.h"
#include "esphome/core/application.h"
#include "esphome/core/entity_base.h"
@@ -27,15 +27,9 @@
#ifdef USE_BLUETOOTH_PROXY
#include "esphome/components/bluetooth_proxy/bluetooth_proxy.h"
#endif
#ifdef USE_CLIMATE
#include "esphome/components/climate/climate_mode.h"
#endif
#ifdef USE_VOICE_ASSISTANT
#include "esphome/components/voice_assistant/voice_assistant.h"
#endif
#ifdef USE_ZWAVE_PROXY
#include "esphome/components/zwave_proxy/zwave_proxy.h"
#endif
namespace esphome::api {
@@ -48,8 +42,6 @@ static constexpr uint8_t MAX_PING_RETRIES = 60;
static constexpr uint16_t PING_RETRY_INTERVAL = 1000;
static constexpr uint32_t KEEPALIVE_DISCONNECT_TIMEOUT = (KEEPALIVE_TIMEOUT_MS * 5) / 2;
static constexpr auto ESPHOME_VERSION_REF = StringRef::from_lit(ESPHOME_VERSION);
static const char *const TAG = "api.connection";
#ifdef USE_CAMERA
static const int CAMERA_STOP_STREAM = 5000;
@@ -119,7 +111,8 @@ void APIConnection::start() {
APIError err = this->helper_->init();
if (err != APIError::OK) {
this->fatal_error_with_log_(LOG_STR("Helper init failed"), err);
on_fatal_error();
this->log_warning_("Helper init failed", err);
return;
}
this->client_info_.peername = helper_->getpeername();
@@ -149,7 +142,8 @@ void APIConnection::loop() {
APIError err = this->helper_->loop();
if (err != APIError::OK) {
this->fatal_error_with_log_(LOG_STR("Socket operation failed"), err);
on_fatal_error();
this->log_socket_operation_failed_(err);
return;
}
@@ -164,13 +158,17 @@ void APIConnection::loop() {
// No more data available
break;
} else if (err != APIError::OK) {
this->fatal_error_with_log_(LOG_STR("Reading failed"), err);
on_fatal_error();
this->log_warning_("Reading failed", err);
return;
} else {
this->last_traffic_ = now;
// read a packet
this->read_message(buffer.data_len, buffer.type,
buffer.data_len > 0 ? &buffer.container[buffer.data_offset] : nullptr);
if (buffer.data_len > 0) {
this->read_message(buffer.data_len, buffer.type, &buffer.container[buffer.data_offset]);
} else {
this->read_message(0, buffer.type, nullptr);
}
if (this->flags_.remove)
return;
}
@@ -202,8 +200,7 @@ void APIConnection::loop() {
// Disconnect if not responded within 2.5*keepalive
if (now - this->last_traffic_ > KEEPALIVE_DISCONNECT_TIMEOUT) {
on_fatal_error();
ESP_LOGW(TAG, "%s (%s) is unresponsive; disconnecting", this->client_info_.name.c_str(),
this->client_info_.peername.c_str());
ESP_LOGW(TAG, "%s is unresponsive; disconnecting", this->get_client_combined_info().c_str());
}
} else if (now - this->last_traffic_ > KEEPALIVE_TIMEOUT_MS && !this->flags_.remove) {
// Only send ping if we're not disconnecting
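The "2.5*keepalive" threshold referenced above is computed earlier in this file in integer math as `(KEEPALIVE_TIMEOUT_MS * 5) / 2`: a ping goes out after one keepalive interval of silence and the connection is dropped after two and a half. A tiny worked example, assuming a hypothetical interval of 60000 ms (the real constant is defined elsewhere in the component):

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  // Hypothetical keepalive interval, for illustration only.
  constexpr uint32_t KEEPALIVE_TIMEOUT_MS = 60000;
  // Integer form of "2.5 x keepalive": multiply first so nothing is lost to truncation.
  constexpr uint32_t KEEPALIVE_DISCONNECT_TIMEOUT = (KEEPALIVE_TIMEOUT_MS * 5) / 2;
  printf("ping after %u ms of silence, disconnect after %u ms\n",
         static_cast<unsigned>(KEEPALIVE_TIMEOUT_MS),
         static_cast<unsigned>(KEEPALIVE_DISCONNECT_TIMEOUT));  // 60000 and 150000 here
  return 0;
}
```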
@@ -253,7 +250,7 @@ bool APIConnection::send_disconnect_response(const DisconnectRequest &msg) {
// remote initiated disconnect_client
// don't close yet, we still need to send the disconnect response
// close will happen on next loop
ESP_LOGD(TAG, "%s (%s) disconnected", this->client_info_.name.c_str(), this->client_info_.peername.c_str());
ESP_LOGD(TAG, "%s disconnected", this->get_client_combined_info().c_str());
this->flags_.next_close = true;
DisconnectResponse resp;
return this->send_message(resp, DisconnectResponse::MESSAGE_TYPE);
@@ -468,7 +465,9 @@ uint16_t APIConnection::try_send_light_state(EntityBase *entity, APIConnection *
resp.cold_white = values.get_cold_white();
resp.warm_white = values.get_warm_white();
if (light->supports_effects()) {
resp.set_effect(light->get_effect_name_ref());
// get_effect_name() returns temporary std::string - must store it
std::string effect_name = light->get_effect_name();
resp.set_effect(StringRef(effect_name));
}
return fill_and_encode_entity_state(light, resp, LightStateResponse::MESSAGE_TYPE, conn, remaining_size, is_single);
}
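Both sides of this hunk are handling the same hazard called out in the comment: a `StringRef` is a non-owning view, so wrapping a temporary `std::string` that dies at the end of the statement leaves a dangling reference. A small self-contained illustration of the bug and the fix; the `StringRef` below is a simplified stand-in, not ESPHome's class:

```cpp
#include <cstddef>
#include <cstdio>
#include <string>

// Simplified non-owning string view, standing in for the real StringRef.
struct StringRef {
  const char *ptr;
  size_t len;
  explicit StringRef(const std::string &s) : ptr(s.c_str()), len(s.size()) {}
};

std::string get_effect_name() { return std::string("Rainbow"); }  // returns a temporary

int main() {
  // BAD: StringRef(get_effect_name()) would view a temporary destroyed at the semicolon,
  // so any later use of the ref would read freed memory.

  // GOOD: keep the string alive in a named local for as long as the ref is used.
  std::string effect_name = get_effect_name();
  StringRef ref(effect_name);
  printf("%.*s\n", static_cast<int>(ref.len), ref.ptr);
  return 0;
}
```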
@@ -626,10 +625,9 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
auto traits = climate->get_traits();
resp.mode = static_cast<enums::ClimateMode>(climate->mode);
resp.action = static_cast<enums::ClimateAction>(climate->action);
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_TEMPERATURE))
if (traits.get_supports_current_temperature())
resp.current_temperature = climate->current_temperature;
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
climate::CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
if (traits.get_supports_two_point_target_temperature()) {
resp.target_temperature_low = climate->target_temperature_low;
resp.target_temperature_high = climate->target_temperature_high;
} else {
@@ -648,9 +646,9 @@ uint16_t APIConnection::try_send_climate_state(EntityBase *entity, APIConnection
}
if (traits.get_supports_swing_modes())
resp.swing_mode = static_cast<enums::ClimateSwingMode>(climate->swing_mode);
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_CURRENT_HUMIDITY))
if (traits.get_supports_current_humidity())
resp.current_humidity = climate->current_humidity;
if (traits.has_feature_flags(climate::CLIMATE_SUPPORTS_TARGET_HUMIDITY))
if (traits.get_supports_target_humidity())
resp.target_humidity = climate->target_humidity;
return fill_and_encode_entity_state(climate, resp, ClimateStateResponse::MESSAGE_TYPE, conn, remaining_size,
is_single);
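One side of this hunk tests a packed `feature_flags` bitmask with `has_feature_flags(...)` in place of the individual `get_supports_*()` getters, matching the single `uint32 feature_flags` field added to the proto above. A minimal sketch of the pattern; the enum values and the "any bit set" semantics are assumptions for illustration, not the real `climate` constants:

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative capability bits; the real climate component defines its own values.
enum ClimateFeature : uint32_t {
  CLIMATE_SUPPORTS_CURRENT_TEMPERATURE = 1u << 0,
  CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE = 1u << 1,
  CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE = 1u << 2,
  CLIMATE_SUPPORTS_CURRENT_HUMIDITY = 1u << 3,
};

struct Traits {
  uint32_t feature_flags{0};
  // "Any of the given bits set" semantics, assumed here for illustration.
  bool has_feature_flags(uint32_t flags) const { return (feature_flags & flags) != 0; }
};

int main() {
  Traits traits;
  traits.feature_flags = CLIMATE_SUPPORTS_CURRENT_TEMPERATURE | CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE;
  if (traits.has_feature_flags(CLIMATE_SUPPORTS_TWO_POINT_TARGET_TEMPERATURE |
                               CLIMATE_REQUIRES_TWO_POINT_TARGET_TEMPERATURE)) {
    printf("send low/high target temperatures\n");
  }
  return 0;
}
```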
@@ -660,14 +658,10 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
auto *climate = static_cast<climate::Climate *>(entity);
ListEntitiesClimateResponse msg;
auto traits = climate->get_traits();
// Flags set for backward compatibility, deprecated in 2025.11.0
msg.supports_current_temperature = traits.get_supports_current_temperature();
msg.supports_current_humidity = traits.get_supports_current_humidity();
msg.supports_two_point_target_temperature = traits.get_supports_two_point_target_temperature();
msg.supports_target_humidity = traits.get_supports_target_humidity();
msg.supports_action = traits.get_supports_action();
// Current feature flags and other supported parameters
msg.feature_flags = traits.get_feature_flags();
msg.supported_modes = &traits.get_supported_modes_for_api_();
msg.visual_min_temperature = traits.get_visual_min_temperature();
msg.visual_max_temperature = traits.get_visual_max_temperature();
@@ -675,6 +669,7 @@ uint16_t APIConnection::try_send_climate_info(EntityBase *entity, APIConnection
msg.visual_current_temperature_step = traits.get_visual_current_temperature_step();
msg.visual_min_humidity = traits.get_visual_min_humidity();
msg.visual_max_humidity = traits.get_visual_max_humidity();
msg.supports_action = traits.get_supports_action();
msg.supported_fan_modes = &traits.get_supported_fan_modes_for_api_();
msg.supported_custom_fan_modes = &traits.get_supported_custom_fan_modes_for_api_();
msg.supported_presets = &traits.get_supported_presets_for_api_();
@@ -1077,23 +1072,17 @@ void APIConnection::camera_image(const CameraImageRequest &msg) {
#ifdef USE_HOMEASSISTANT_TIME
void APIConnection::on_get_time_response(const GetTimeResponse &value) {
if (homeassistant::global_homeassistant_time != nullptr) {
if (homeassistant::global_homeassistant_time != nullptr)
homeassistant::global_homeassistant_time->set_epoch_time(value.epoch_seconds);
#ifdef USE_TIME_TIMEZONE
if (value.timezone_len > 0) {
const std::string &current_tz = homeassistant::global_homeassistant_time->get_timezone();
// Compare without allocating a string
if (current_tz.length() != value.timezone_len ||
memcmp(current_tz.c_str(), value.timezone, value.timezone_len) != 0) {
homeassistant::global_homeassistant_time->set_timezone(
std::string(reinterpret_cast<const char *>(value.timezone), value.timezone_len));
}
}
#endif
}
}
#endif
bool APIConnection::send_get_time_response(const GetTimeRequest &msg) {
GetTimeResponse resp;
resp.epoch_seconds = ::time(nullptr);
return this->send_message(resp, GetTimeResponse::MESSAGE_TYPE);
}
#ifdef USE_BLUETOOTH_PROXY
void APIConnection::subscribe_bluetooth_le_advertisements(const SubscribeBluetoothLEAdvertisementsRequest &msg) {
bluetooth_proxy::global_bluetooth_proxy->subscribe_api_connection(this, msg.flags);
@@ -1204,23 +1193,6 @@ bool APIConnection::send_voice_assistant_get_configuration_response(const VoiceA
resp_wake_word.trained_languages.push_back(lang);
}
}
// Filter external wake words
for (auto &wake_word : msg.external_wake_words) {
if (wake_word.model_type != "micro") {
// microWakeWord only
continue;
}
resp.available_wake_words.emplace_back();
auto &resp_wake_word = resp.available_wake_words.back();
resp_wake_word.set_id(StringRef(wake_word.id));
resp_wake_word.set_wake_word(StringRef(wake_word.wake_word));
for (const auto &lang : wake_word.trained_languages) {
resp_wake_word.trained_languages.push_back(lang);
}
}
resp.active_wake_words = &config.active_wake_words;
resp.max_active_wake_words = config.max_active_wake_words;
return this->send_message(resp, VoiceAssistantConfigurationResponse::MESSAGE_TYPE);
@@ -1231,16 +1203,7 @@ void APIConnection::voice_assistant_set_configuration(const VoiceAssistantSetCon
voice_assistant::global_voice_assistant->on_set_configuration(msg.active_wake_words);
}
}
#endif
#ifdef USE_ZWAVE_PROXY
void APIConnection::zwave_proxy_frame(const ZWaveProxyFrame &msg) {
zwave_proxy::global_zwave_proxy->send_frame(msg.data, msg.data_len);
}
void APIConnection::zwave_proxy_request(const ZWaveProxyRequest &msg) {
zwave_proxy::global_zwave_proxy->zwave_proxy_request(this, msg.type);
}
#endif
#ifdef USE_ALARM_CONTROL_PANEL
@@ -1387,7 +1350,7 @@ void APIConnection::complete_authentication_() {
}
this->flags_.connection_state = static_cast<uint8_t>(ConnectionState::AUTHENTICATED);
ESP_LOGD(TAG, "%s (%s) connected", this->client_info_.name.c_str(), this->client_info_.peername.c_str());
ESP_LOGD(TAG, "%s connected", this->get_client_combined_info().c_str());
#ifdef USE_API_CLIENT_CONNECTED_TRIGGER
this->parent_->get_client_connected_trigger()->trigger(this->client_info_.name, this->client_info_.peername);
#endif
@@ -1396,15 +1359,10 @@ void APIConnection::complete_authentication_() {
this->send_time_request();
}
#endif
#ifdef USE_ZWAVE_PROXY
if (zwave_proxy::global_zwave_proxy != nullptr) {
zwave_proxy::global_zwave_proxy->api_connection_authenticated(this);
}
#endif
}
bool APIConnection::send_hello_response(const HelloRequest &msg) {
this->client_info_.name.assign(reinterpret_cast<const char *>(msg.client_info), msg.client_info_len);
this->client_info_.name = msg.client_info;
this->client_info_.peername = this->helper_->getpeername();
this->client_api_version_major_ = msg.api_version_major;
this->client_api_version_minor_ = msg.api_version_minor;
@@ -1413,9 +1371,10 @@ bool APIConnection::send_hello_response(const HelloRequest &msg) {
HelloResponse resp;
resp.api_version_major = 1;
resp.api_version_minor = 13;
// Send only the version string - the client only logs this for debugging and doesn't use it otherwise
resp.set_server_info(ESPHOME_VERSION_REF);
resp.api_version_minor = 12;
// Temporary string for concatenation - will be valid during send_message call
std::string server_info = App.get_name() + " (esphome v" ESPHOME_VERSION ")";
resp.set_server_info(StringRef(server_info));
resp.set_name(StringRef(App.get_name()));
#ifdef USE_API_PASSWORD
@@ -1428,17 +1387,20 @@ bool APIConnection::send_hello_response(const HelloRequest &msg) {
return this->send_message(resp, HelloResponse::MESSAGE_TYPE);
}
bool APIConnection::send_connect_response(const ConnectRequest &msg) {
bool correct = true;
#ifdef USE_API_PASSWORD
bool APIConnection::send_authenticate_response(const AuthenticationRequest &msg) {
AuthenticationResponse resp;
correct = this->parent_->check_password(msg.password);
#endif
ConnectResponse resp;
// bool invalid_password = 1;
resp.invalid_password = !this->parent_->check_password(msg.password, msg.password_len);
if (!resp.invalid_password) {
resp.invalid_password = !correct;
if (correct) {
this->complete_authentication_();
}
return this->send_message(resp, AuthenticationResponse::MESSAGE_TYPE);
return this->send_message(resp, ConnectResponse::MESSAGE_TYPE);
}
#endif // USE_API_PASSWORD
bool APIConnection::send_ping_response(const PingRequest &msg) {
PingResponse resp;
@@ -1459,9 +1421,13 @@ bool APIConnection::send_device_info_response(const DeviceInfoRequest &msg) {
std::string mac_address = get_mac_address_pretty();
resp.set_mac_address(StringRef(mac_address));
// Compile-time StringRef constants
static constexpr auto ESPHOME_VERSION_REF = StringRef::from_lit(ESPHOME_VERSION);
resp.set_esphome_version(ESPHOME_VERSION_REF);
resp.set_compilation_time(App.get_compilation_time_ref());
// get_compilation_time() returns temporary std::string - must store it
std::string compilation_time = App.get_compilation_time();
resp.set_compilation_time(StringRef(compilation_time));
// Compile-time StringRef constants for manufacturers
#if defined(USE_ESP8266) || defined(USE_ESP32)
@@ -1502,10 +1468,6 @@ bool APIConnection::send_device_info_response(const DeviceInfoRequest &msg) {
#ifdef USE_VOICE_ASSISTANT
resp.voice_assistant_feature_flags = voice_assistant::global_voice_assistant->get_feature_flags();
#endif
#ifdef USE_ZWAVE_PROXY
resp.zwave_proxy_feature_flags = zwave_proxy::global_zwave_proxy->get_feature_flags();
resp.zwave_home_id = zwave_proxy::global_zwave_proxy->get_home_id();
#endif
#ifdef USE_API_NOISE
resp.api_encryption_supported = true;
#endif
@@ -1556,20 +1518,6 @@ void APIConnection::execute_service(const ExecuteServiceRequest &msg) {
}
}
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
void APIConnection::on_homeassistant_action_response(const HomeassistantActionResponse &msg) {
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
if (msg.response_data_len > 0) {
this->parent_->handle_action_response(msg.call_id, msg.success, msg.error_message, msg.response_data,
msg.response_data_len);
} else
#endif
{
this->parent_->handle_action_response(msg.call_id, msg.success, msg.error_message);
}
};
#endif
#ifdef USE_API_NOISE
bool APIConnection::send_noise_encryption_set_key_response(const NoiseEncryptionSetKeyRequest &msg) {
NoiseEncryptionSetKeyResponse resp;
@@ -1600,7 +1548,8 @@ bool APIConnection::try_to_clear_buffer(bool log_out_of_space) {
delay(0);
APIError err = this->helper_->loop();
if (err != APIError::OK) {
this->fatal_error_with_log_(LOG_STR("Socket operation failed"), err);
on_fatal_error();
this->log_socket_operation_failed_(err);
return false;
}
if (this->helper_->can_write_without_blocking())
@@ -1619,7 +1568,8 @@ bool APIConnection::send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) {
if (err == APIError::WOULD_BLOCK)
return false;
if (err != APIError::OK) {
this->fatal_error_with_log_(LOG_STR("Packet write failed"), err);
on_fatal_error();
this->log_warning_("Packet write failed", err);
return false;
}
// Do not set last_traffic_ on send
@@ -1628,12 +1578,12 @@ bool APIConnection::send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) {
#ifdef USE_API_PASSWORD
void APIConnection::on_unauthenticated_access() {
this->on_fatal_error();
ESP_LOGD(TAG, "%s (%s) no authentication", this->client_info_.name.c_str(), this->client_info_.peername.c_str());
ESP_LOGD(TAG, "%s access without authentication", this->get_client_combined_info().c_str());
}
#endif
void APIConnection::on_no_setup_connection() {
this->on_fatal_error();
ESP_LOGD(TAG, "%s (%s) no connection setup", this->client_info_.name.c_str(), this->client_info_.peername.c_str());
ESP_LOGD(TAG, "%s access without full connection", this->get_client_combined_info().c_str());
}
void APIConnection::on_fatal_error() {
this->helper_->close();
@@ -1805,7 +1755,8 @@ void APIConnection::process_batch_() {
APIError err = this->helper_->write_protobuf_packets(ProtoWriteBuffer{&shared_buf},
std::span<const PacketInfo>(packet_info, packet_count));
if (err != APIError::OK && err != APIError::WOULD_BLOCK) {
this->fatal_error_with_log_(LOG_STR("Batch write failed"), err);
on_fatal_error();
this->log_warning_("Batch write failed", err);
}
#ifdef HAS_PROTO_MESSAGE_DUMP
@@ -1883,10 +1834,11 @@ void APIConnection::process_state_subscriptions_() {
}
#endif // USE_API_HOMEASSISTANT_STATES
void APIConnection::log_warning_(const LogString *message, APIError err) {
ESP_LOGW(TAG, "%s (%s): %s %s errno=%d", this->client_info_.name.c_str(), this->client_info_.peername.c_str(),
LOG_STR_ARG(message), LOG_STR_ARG(api_error_to_logstr(err)), errno);
void APIConnection::log_warning_(const char *message, APIError err) {
ESP_LOGW(TAG, "%s: %s %s errno=%d", this->get_client_combined_info().c_str(), message, api_error_to_str(err), errno);
}
void APIConnection::log_socket_operation_failed_(APIError err) { this->log_warning_("Socket operation failed", err); }
} // namespace esphome::api
#endif

View File

@@ -10,8 +10,8 @@
#include "esphome/core/component.h"
#include "esphome/core/entity_base.h"
#include <functional>
#include <vector>
#include <functional>
namespace esphome::api {
@@ -19,6 +19,14 @@ namespace esphome::api {
struct ClientInfo {
std::string name; // Client name from Hello message
std::string peername; // IP:port from socket
std::string get_combined_info() const {
if (name == peername) {
// Before Hello message, both are the same
return name;
}
return name + " (" + peername + ")";
}
};
// Keepalive timeout in milliseconds
@@ -36,7 +44,7 @@ static constexpr size_t MAX_PACKETS_PER_BATCH = 64; // ESP32 has 8KB+ stack, HO
static constexpr size_t MAX_PACKETS_PER_BATCH = 32; // ESP8266/RP2040/etc have smaller stacks
#endif
class APIConnection final : public APIServerConnection {
class APIConnection : public APIServerConnection {
public:
friend class APIServer;
friend class ListEntitiesIterator;
@@ -124,15 +132,12 @@ class APIConnection final : public APIServerConnection {
#endif
bool try_send_log_message(int level, const char *tag, const char *line, size_t message_len);
#ifdef USE_API_HOMEASSISTANT_SERVICES
void send_homeassistant_action(const HomeassistantActionRequest &call) {
void send_homeassistant_service_call(const HomeassistantServiceResponse &call) {
if (!this->flags_.service_call_subscription)
return;
this->send_message(call, HomeassistantActionRequest::MESSAGE_TYPE);
this->send_message(call, HomeassistantServiceResponse::MESSAGE_TYPE);
}
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
void on_homeassistant_action_response(const HomeassistantActionResponse &msg) override;
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
#endif // USE_API_HOMEASSISTANT_SERVICES
#endif
#ifdef USE_BLUETOOTH_PROXY
void subscribe_bluetooth_le_advertisements(const SubscribeBluetoothLEAdvertisementsRequest &msg) override;
void unsubscribe_bluetooth_le_advertisements(const UnsubscribeBluetoothLEAdvertisementsRequest &msg) override;
@@ -166,11 +171,6 @@ class APIConnection final : public APIServerConnection {
void voice_assistant_set_configuration(const VoiceAssistantSetConfiguration &msg) override;
#endif
#ifdef USE_ZWAVE_PROXY
void zwave_proxy_frame(const ZWaveProxyFrame &msg) override;
void zwave_proxy_request(const ZWaveProxyRequest &msg) override;
#endif
#ifdef USE_ALARM_CONTROL_PANEL
bool send_alarm_control_panel_state(alarm_control_panel::AlarmControlPanel *a_alarm_control_panel);
void alarm_control_panel_command(const AlarmControlPanelCommandRequest &msg) override;
@@ -197,9 +197,7 @@ class APIConnection final : public APIServerConnection {
void on_get_time_response(const GetTimeResponse &value) override;
#endif
bool send_hello_response(const HelloRequest &msg) override;
#ifdef USE_API_PASSWORD
bool send_authenticate_response(const AuthenticationRequest &msg) override;
#endif
bool send_connect_response(const ConnectRequest &msg) override;
bool send_disconnect_response(const DisconnectRequest &msg) override;
bool send_ping_response(const PingRequest &msg) override;
bool send_device_info_response(const DeviceInfoRequest &msg) override;
@@ -221,6 +219,7 @@ class APIConnection final : public APIServerConnection {
#ifdef USE_API_HOMEASSISTANT_STATES
void subscribe_home_assistant_states(const SubscribeHomeAssistantStatesRequest &msg) override;
#endif
bool send_get_time_response(const GetTimeRequest &msg) override;
#ifdef USE_API_SERVICES
void execute_service(const ExecuteServiceRequest &msg) override;
#endif
@@ -273,8 +272,7 @@ class APIConnection final : public APIServerConnection {
bool try_to_clear_buffer(bool log_out_of_space);
bool send_buffer(ProtoWriteBuffer buffer, uint8_t message_type) override;
const std::string &get_name() const { return this->client_info_.name; }
const std::string &get_peername() const { return this->client_info_.peername; }
std::string get_client_combined_info() const { return this->client_info_.get_combined_info(); }
protected:
// Helper function to handle authentication completion
@@ -303,17 +301,9 @@ class APIConnection final : public APIServerConnection {
APIConnection *conn, uint32_t remaining_size, bool is_single) {
// Set common fields that are shared by all entity types
msg.key = entity->get_object_id_hash();
// Try to use static reference first to avoid allocation
StringRef static_ref = entity->get_object_id_ref_for_api_();
// Store dynamic string outside the if-else to maintain lifetime
std::string object_id;
if (!static_ref.empty()) {
msg.set_object_id(static_ref);
} else {
// Dynamic case - need to allocate
object_id = entity->get_object_id();
msg.set_object_id(StringRef(object_id));
}
// IMPORTANT: get_object_id() may return a temporary std::string
std::string object_id = entity->get_object_id();
msg.set_object_id(StringRef(object_id));
if (entity->has_own_name()) {
msg.set_name(entity->get_name());
@@ -734,12 +724,9 @@ class APIConnection final : public APIServerConnection {
}
// Helper function to log API errors with errno
void log_warning_(const LogString *message, APIError err);
// Helper to handle fatal errors with logging
inline void fatal_error_with_log_(const LogString *message, APIError err) {
this->on_fatal_error();
this->log_warning_(message, err);
}
void log_warning_(const char *message, APIError err);
// Specific helper for duplicated error message
void log_socket_operation_failed_(APIError err);
};
} // namespace esphome::api

View File

@@ -13,8 +13,7 @@ namespace esphome::api {
static const char *const TAG = "api.frame_helper";
#define HELPER_LOG(msg, ...) \
ESP_LOGVV(TAG, "%s (%s): " msg, this->client_info_->name.c_str(), this->client_info_->peername.c_str(), ##__VA_ARGS__)
#define HELPER_LOG(msg, ...) ESP_LOGVV(TAG, "%s: " msg, this->client_info_->get_combined_info().c_str(), ##__VA_ARGS__)
#ifdef HELPER_LOG_PACKETS
#define LOG_PACKET_RECEIVED(buffer) ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(buffer).c_str())
@@ -24,64 +23,64 @@ static const char *const TAG = "api.frame_helper";
#define LOG_PACKET_SENDING(data, len) ((void) 0)
#endif
const LogString *api_error_to_logstr(APIError err) {
const char *api_error_to_str(APIError err) {
// not using switch to ensure compiler doesn't try to build a big table out of it
if (err == APIError::OK) {
return LOG_STR("OK");
return "OK";
} else if (err == APIError::WOULD_BLOCK) {
return LOG_STR("WOULD_BLOCK");
return "WOULD_BLOCK";
} else if (err == APIError::BAD_INDICATOR) {
return LOG_STR("BAD_INDICATOR");
return "BAD_INDICATOR";
} else if (err == APIError::BAD_DATA_PACKET) {
return LOG_STR("BAD_DATA_PACKET");
return "BAD_DATA_PACKET";
} else if (err == APIError::TCP_NODELAY_FAILED) {
return LOG_STR("TCP_NODELAY_FAILED");
return "TCP_NODELAY_FAILED";
} else if (err == APIError::TCP_NONBLOCKING_FAILED) {
return LOG_STR("TCP_NONBLOCKING_FAILED");
return "TCP_NONBLOCKING_FAILED";
} else if (err == APIError::CLOSE_FAILED) {
return LOG_STR("CLOSE_FAILED");
return "CLOSE_FAILED";
} else if (err == APIError::SHUTDOWN_FAILED) {
return LOG_STR("SHUTDOWN_FAILED");
return "SHUTDOWN_FAILED";
} else if (err == APIError::BAD_STATE) {
return LOG_STR("BAD_STATE");
return "BAD_STATE";
} else if (err == APIError::BAD_ARG) {
return LOG_STR("BAD_ARG");
return "BAD_ARG";
} else if (err == APIError::SOCKET_READ_FAILED) {
return LOG_STR("SOCKET_READ_FAILED");
return "SOCKET_READ_FAILED";
} else if (err == APIError::SOCKET_WRITE_FAILED) {
return LOG_STR("SOCKET_WRITE_FAILED");
return "SOCKET_WRITE_FAILED";
} else if (err == APIError::OUT_OF_MEMORY) {
return LOG_STR("OUT_OF_MEMORY");
return "OUT_OF_MEMORY";
} else if (err == APIError::CONNECTION_CLOSED) {
return LOG_STR("CONNECTION_CLOSED");
return "CONNECTION_CLOSED";
}
#ifdef USE_API_NOISE
else if (err == APIError::BAD_HANDSHAKE_PACKET_LEN) {
return LOG_STR("BAD_HANDSHAKE_PACKET_LEN");
return "BAD_HANDSHAKE_PACKET_LEN";
} else if (err == APIError::HANDSHAKESTATE_READ_FAILED) {
return LOG_STR("HANDSHAKESTATE_READ_FAILED");
return "HANDSHAKESTATE_READ_FAILED";
} else if (err == APIError::HANDSHAKESTATE_WRITE_FAILED) {
return LOG_STR("HANDSHAKESTATE_WRITE_FAILED");
return "HANDSHAKESTATE_WRITE_FAILED";
} else if (err == APIError::HANDSHAKESTATE_BAD_STATE) {
return LOG_STR("HANDSHAKESTATE_BAD_STATE");
return "HANDSHAKESTATE_BAD_STATE";
} else if (err == APIError::CIPHERSTATE_DECRYPT_FAILED) {
return LOG_STR("CIPHERSTATE_DECRYPT_FAILED");
return "CIPHERSTATE_DECRYPT_FAILED";
} else if (err == APIError::CIPHERSTATE_ENCRYPT_FAILED) {
return LOG_STR("CIPHERSTATE_ENCRYPT_FAILED");
return "CIPHERSTATE_ENCRYPT_FAILED";
} else if (err == APIError::HANDSHAKESTATE_SETUP_FAILED) {
return LOG_STR("HANDSHAKESTATE_SETUP_FAILED");
return "HANDSHAKESTATE_SETUP_FAILED";
} else if (err == APIError::HANDSHAKESTATE_SPLIT_FAILED) {
return LOG_STR("HANDSHAKESTATE_SPLIT_FAILED");
return "HANDSHAKESTATE_SPLIT_FAILED";
} else if (err == APIError::BAD_HANDSHAKE_ERROR_BYTE) {
return LOG_STR("BAD_HANDSHAKE_ERROR_BYTE");
return "BAD_HANDSHAKE_ERROR_BYTE";
}
#endif
return LOG_STR("UNKNOWN");
return "UNKNOWN";
}
// Default implementation for loop - handles sending buffered data
APIError APIFrameHelper::loop() {
if (this->tx_buf_count_ > 0) {
if (!this->tx_buf_.empty()) {
APIError err = try_send_tx_buf_();
if (err != APIError::OK && err != APIError::WOULD_BLOCK) {
return err;
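The `LOG_STR(...)` values on one side of this hunk exist so constant log strings can live in flash on platforms like the ESP8266, the same reason the noise helper changes below use `PROGMEM`, `strlen_P`, and `memcpy_P`. A generic illustration of the flash-string idea, not the project's actual macros:

```cpp
#include <cstdio>
#include <cstring>

#ifdef USE_ESP8266
#include <pgmspace.h>
// On ESP8266 the literal is placed in flash and must be copied out with the *_P helpers.
static const char ERR_WOULD_BLOCK[] PROGMEM = "WOULD_BLOCK";
#define COPY_ERR(dst, dst_len) strncpy_P((dst), ERR_WOULD_BLOCK, (dst_len))
#else
// On other platforms a plain RAM literal and strncpy behave the same way.
static const char ERR_WOULD_BLOCK[] = "WOULD_BLOCK";
#define COPY_ERR(dst, dst_len) strncpy((dst), ERR_WOULD_BLOCK, (dst_len))
#endif

int main() {
  char buf[16];
  COPY_ERR(buf, sizeof(buf) - 1);
  buf[sizeof(buf) - 1] = '\0';
  printf("error=%s\n", buf);
  return 0;
}
```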
@@ -103,20 +102,9 @@ APIError APIFrameHelper::handle_socket_write_error_() {
// Helper method to buffer data from IOVs
void APIFrameHelper::buffer_data_from_iov_(const struct iovec *iov, int iovcnt, uint16_t total_write_len,
uint16_t offset) {
// Check if queue is full
if (this->tx_buf_count_ >= API_MAX_SEND_QUEUE) {
HELPER_LOG("Send queue full (%u buffers), dropping connection", this->tx_buf_count_);
this->state_ = State::FAILED;
return;
}
uint16_t buffer_size = total_write_len - offset;
auto &buffer = this->tx_buf_[this->tx_buf_tail_];
buffer = std::make_unique<SendBuffer>(SendBuffer{
.data = std::make_unique<uint8_t[]>(buffer_size),
.size = buffer_size,
.offset = 0,
});
SendBuffer buffer;
buffer.size = total_write_len - offset;
buffer.data = std::make_unique<uint8_t[]>(buffer.size);
uint16_t to_skip = offset;
uint16_t write_pos = 0;
@@ -129,15 +117,12 @@ void APIFrameHelper::buffer_data_from_iov_(const struct iovec *iov, int iovcnt,
// Include this segment (partially or fully)
const uint8_t *src = reinterpret_cast<uint8_t *>(iov[i].iov_base) + to_skip;
uint16_t len = static_cast<uint16_t>(iov[i].iov_len) - to_skip;
std::memcpy(buffer->data.get() + write_pos, src, len);
std::memcpy(buffer.data.get() + write_pos, src, len);
write_pos += len;
to_skip = 0;
}
}
// Update circular buffer tracking
this->tx_buf_tail_ = (this->tx_buf_tail_ + 1) % API_MAX_SEND_QUEUE;
this->tx_buf_count_++;
this->tx_buf_.push_back(std::move(buffer));
}
// This method writes data to socket or buffers it
@@ -155,7 +140,7 @@ APIError APIFrameHelper::write_raw_(const struct iovec *iov, int iovcnt, uint16_
#endif
// Try to send any existing buffered data first if there is any
if (this->tx_buf_count_ > 0) {
if (!this->tx_buf_.empty()) {
APIError send_result = try_send_tx_buf_();
// If real error occurred (not just WOULD_BLOCK), return it
if (send_result != APIError::OK && send_result != APIError::WOULD_BLOCK) {
@@ -164,7 +149,7 @@ APIError APIFrameHelper::write_raw_(const struct iovec *iov, int iovcnt, uint16_
// If there is still data in the buffer, we can't send, buffer
// the new data and return
if (this->tx_buf_count_ > 0) {
if (!this->tx_buf_.empty()) {
this->buffer_data_from_iov_(iov, iovcnt, total_write_len, 0);
return APIError::OK; // Success, data buffered
}
@@ -192,31 +177,32 @@ APIError APIFrameHelper::write_raw_(const struct iovec *iov, int iovcnt, uint16_
}
// Common implementation for trying to send buffered data
// IMPORTANT: Caller MUST ensure tx_buf_count_ > 0 before calling this method
// IMPORTANT: Caller MUST ensure tx_buf_ is not empty before calling this method
APIError APIFrameHelper::try_send_tx_buf_() {
// Try to send from tx_buf - we assume it's not empty as it's the caller's responsibility to check
while (this->tx_buf_count_ > 0) {
bool tx_buf_empty = false;
while (!tx_buf_empty) {
// Get the first buffer in the queue
SendBuffer *front_buffer = this->tx_buf_[this->tx_buf_head_].get();
SendBuffer &front_buffer = this->tx_buf_.front();
// Try to send the remaining data in this buffer
ssize_t sent = this->socket_->write(front_buffer->current_data(), front_buffer->remaining());
ssize_t sent = this->socket_->write(front_buffer.current_data(), front_buffer.remaining());
if (sent == -1) {
return this->handle_socket_write_error_();
} else if (sent == 0) {
// Nothing sent but not an error
return APIError::WOULD_BLOCK;
} else if (static_cast<uint16_t>(sent) < front_buffer->remaining()) {
} else if (static_cast<uint16_t>(sent) < front_buffer.remaining()) {
// Partially sent, update offset
// Cast to ensure no overflow issues with uint16_t
front_buffer->offset += static_cast<uint16_t>(sent);
front_buffer.offset += static_cast<uint16_t>(sent);
return APIError::WOULD_BLOCK; // Stop processing more buffers if we couldn't send a complete buffer
} else {
// Buffer completely sent, remove it from the queue
this->tx_buf_[this->tx_buf_head_].reset();
this->tx_buf_head_ = (this->tx_buf_head_ + 1) % API_MAX_SEND_QUEUE;
this->tx_buf_count_--;
this->tx_buf_.pop_front();
// Update empty status for the loop condition
tx_buf_empty = this->tx_buf_.empty();
// Continue loop to try sending the next buffer
}
}
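One side of this hunk keeps the pending send buffers in a fixed-size circular queue (`tx_buf_head_` / `tx_buf_tail_` / `tx_buf_count_`, indices wrapped modulo `API_MAX_SEND_QUEUE`) instead of a `std::deque`, which bounds memory use and avoids heap churn per packet. A minimal sketch of that head/tail/count pattern; the element type and capacity are placeholders:

```cpp
#include <array>
#include <cstdint>
#include <cstdio>

constexpr uint8_t QUEUE_CAPACITY = 8;  // stand-in for API_MAX_SEND_QUEUE

template<typename T> class CircularQueue {
 public:
  bool push(T value) {
    if (count_ >= QUEUE_CAPACITY)
      return false;  // full: the connection code treats this as a fatal condition
    buf_[tail_] = value;
    tail_ = (tail_ + 1) % QUEUE_CAPACITY;
    ++count_;
    return true;
  }
  bool pop(T &out) {
    if (count_ == 0)
      return false;
    out = buf_[head_];
    head_ = (head_ + 1) % QUEUE_CAPACITY;
    --count_;
    return true;
  }
  bool empty() const { return count_ == 0; }

 private:
  std::array<T, QUEUE_CAPACITY> buf_{};
  uint8_t head_{0}, tail_{0}, count_{0};
};

int main() {
  CircularQueue<int> q;
  q.push(1);
  q.push(2);
  int v;
  while (q.pop(v))
    printf("%d\n", v);  // prints 1 then 2, FIFO order
  return 0;
}
```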

View File

@@ -1,8 +1,7 @@
#pragma once
#include <array>
#include <cstdint>
#include <deque>
#include <limits>
#include <memory>
#include <span>
#include <utility>
#include <vector>
@@ -18,17 +17,6 @@ namespace esphome::api {
// uncomment to log raw packets
//#define HELPER_LOG_PACKETS
// Maximum message size limits to prevent OOM on constrained devices
// Handshake messages are limited to a small size for security
static constexpr uint16_t MAX_HANDSHAKE_SIZE = 128;
// Data message limits vary by platform based on available memory
#ifdef USE_ESP8266
static constexpr uint16_t MAX_MESSAGE_SIZE = 8192; // 8 KiB for ESP8266
#else
static constexpr uint16_t MAX_MESSAGE_SIZE = 32768; // 32 KiB for ESP32 and other platforms
#endif
// Forward declaration
struct ClientInfo;
@@ -78,7 +66,7 @@ enum class APIError : uint16_t {
#endif
};
const LogString *api_error_to_logstr(APIError err);
const char *api_error_to_str(APIError err);
class APIFrameHelper {
public:
@@ -91,7 +79,7 @@ class APIFrameHelper {
virtual APIError init() = 0;
virtual APIError loop();
virtual APIError read_packet(ReadPacketBuffer *buffer) = 0;
bool can_write_without_blocking() { return this->state_ == State::DATA && this->tx_buf_count_ == 0; }
bool can_write_without_blocking() { return state_ == State::DATA && tx_buf_.empty(); }
std::string getpeername() { return socket_->getpeername(); }
int getpeername(struct sockaddr *addr, socklen_t *addrlen) { return socket_->getpeername(addr, addrlen); }
APIError close() {
@@ -116,9 +104,9 @@ class APIFrameHelper {
// The buffer contains all messages with appropriate padding before each
virtual APIError write_protobuf_packets(ProtoWriteBuffer buffer, std::span<const PacketInfo> packets) = 0;
// Get the frame header padding required by this protocol
uint8_t frame_header_padding() const { return frame_header_padding_; }
virtual uint8_t frame_header_padding() = 0;
// Get the frame footer size required by this protocol
uint8_t frame_footer_size() const { return frame_footer_size_; }
virtual uint8_t frame_footer_size() = 0;
// Check if socket has data ready to read
bool is_socket_ready() const { return socket_ != nullptr && socket_->ready(); }
@@ -173,7 +161,7 @@ class APIFrameHelper {
};
// Containers (size varies, but typically 12+ bytes on 32-bit)
std::array<std::unique_ptr<SendBuffer>, API_MAX_SEND_QUEUE> tx_buf_;
std::deque<SendBuffer> tx_buf_;
std::vector<struct iovec> reusable_iovs_;
std::vector<uint8_t> rx_buf_;
@@ -186,10 +174,7 @@ class APIFrameHelper {
State state_{State::INITIALIZE};
uint8_t frame_header_padding_{0};
uint8_t frame_footer_size_{0};
uint8_t tx_buf_head_{0};
uint8_t tx_buf_tail_{0};
uint8_t tx_buf_count_{0};
// 8 bytes total, 0 bytes padding
// 5 bytes total, 3 bytes padding
// Common initialization for both plaintext and noise protocols
APIError init_common_();

View File

@@ -10,22 +10,13 @@
#include <cstring>
#include <cinttypes>
#ifdef USE_ESP8266
#include <pgmspace.h>
#endif
namespace esphome::api {
static const char *const TAG = "api.noise";
#ifdef USE_ESP8266
static const char PROLOGUE_INIT[] PROGMEM = "NoiseAPIInit";
#else
static const char *const PROLOGUE_INIT = "NoiseAPIInit";
#endif
static constexpr size_t PROLOGUE_INIT_LEN = 12; // strlen("NoiseAPIInit")
#define HELPER_LOG(msg, ...) \
ESP_LOGVV(TAG, "%s (%s): " msg, this->client_info_->name.c_str(), this->client_info_->peername.c_str(), ##__VA_ARGS__)
#define HELPER_LOG(msg, ...) ESP_LOGVV(TAG, "%s: " msg, this->client_info_->get_combined_info().c_str(), ##__VA_ARGS__)
#ifdef HELPER_LOG_PACKETS
#define LOG_PACKET_RECEIVED(buffer) ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(buffer).c_str())
@@ -36,42 +27,42 @@ static constexpr size_t PROLOGUE_INIT_LEN = 12; // strlen("NoiseAPIInit")
#endif
/// Convert a noise error code to a readable error
const LogString *noise_err_to_logstr(int err) {
std::string noise_err_to_str(int err) {
if (err == NOISE_ERROR_NO_MEMORY)
return LOG_STR("NO_MEMORY");
return "NO_MEMORY";
if (err == NOISE_ERROR_UNKNOWN_ID)
return LOG_STR("UNKNOWN_ID");
return "UNKNOWN_ID";
if (err == NOISE_ERROR_UNKNOWN_NAME)
return LOG_STR("UNKNOWN_NAME");
return "UNKNOWN_NAME";
if (err == NOISE_ERROR_MAC_FAILURE)
return LOG_STR("MAC_FAILURE");
return "MAC_FAILURE";
if (err == NOISE_ERROR_NOT_APPLICABLE)
return LOG_STR("NOT_APPLICABLE");
return "NOT_APPLICABLE";
if (err == NOISE_ERROR_SYSTEM)
return LOG_STR("SYSTEM");
return "SYSTEM";
if (err == NOISE_ERROR_REMOTE_KEY_REQUIRED)
return LOG_STR("REMOTE_KEY_REQUIRED");
return "REMOTE_KEY_REQUIRED";
if (err == NOISE_ERROR_LOCAL_KEY_REQUIRED)
return LOG_STR("LOCAL_KEY_REQUIRED");
return "LOCAL_KEY_REQUIRED";
if (err == NOISE_ERROR_PSK_REQUIRED)
return LOG_STR("PSK_REQUIRED");
return "PSK_REQUIRED";
if (err == NOISE_ERROR_INVALID_LENGTH)
return LOG_STR("INVALID_LENGTH");
return "INVALID_LENGTH";
if (err == NOISE_ERROR_INVALID_PARAM)
return LOG_STR("INVALID_PARAM");
return "INVALID_PARAM";
if (err == NOISE_ERROR_INVALID_STATE)
return LOG_STR("INVALID_STATE");
return "INVALID_STATE";
if (err == NOISE_ERROR_INVALID_NONCE)
return LOG_STR("INVALID_NONCE");
return "INVALID_NONCE";
if (err == NOISE_ERROR_INVALID_PRIVATE_KEY)
return LOG_STR("INVALID_PRIVATE_KEY");
return "INVALID_PRIVATE_KEY";
if (err == NOISE_ERROR_INVALID_PUBLIC_KEY)
return LOG_STR("INVALID_PUBLIC_KEY");
return "INVALID_PUBLIC_KEY";
if (err == NOISE_ERROR_INVALID_FORMAT)
return LOG_STR("INVALID_FORMAT");
return "INVALID_FORMAT";
if (err == NOISE_ERROR_INVALID_SIGNATURE)
return LOG_STR("INVALID_SIGNATURE");
return LOG_STR("UNKNOWN");
return "INVALID_SIGNATURE";
return to_string(err);
}
/// Initialize the frame helper, returns OK if successful.
@@ -84,11 +75,7 @@ APIError APINoiseFrameHelper::init() {
// init prologue
size_t old_size = prologue_.size();
prologue_.resize(old_size + PROLOGUE_INIT_LEN);
#ifdef USE_ESP8266
memcpy_P(prologue_.data() + old_size, PROLOGUE_INIT, PROLOGUE_INIT_LEN);
#else
std::memcpy(prologue_.data() + old_size, PROLOGUE_INIT, PROLOGUE_INIT_LEN);
#endif
state_ = State::CLIENT_HELLO;
return APIError::OK;
@@ -96,18 +83,18 @@ APIError APINoiseFrameHelper::init() {
// Helper for handling handshake frame errors
APIError APINoiseFrameHelper::handle_handshake_frame_error_(APIError aerr) {
if (aerr == APIError::BAD_INDICATOR) {
send_explicit_handshake_reject_(LOG_STR("Bad indicator byte"));
send_explicit_handshake_reject_("Bad indicator byte");
} else if (aerr == APIError::BAD_HANDSHAKE_PACKET_LEN) {
send_explicit_handshake_reject_(LOG_STR("Bad handshake packet len"));
send_explicit_handshake_reject_("Bad handshake packet len");
}
return aerr;
}
// Helper for handling noise library errors
APIError APINoiseFrameHelper::handle_noise_error_(int err, const LogString *func_name, APIError api_err) {
APIError APINoiseFrameHelper::handle_noise_error_(int err, const char *func_name, APIError api_err) {
if (err != 0) {
state_ = State::FAILED;
HELPER_LOG("%s failed: %s", LOG_STR_ARG(func_name), LOG_STR_ARG(noise_err_to_logstr(err)));
HELPER_LOG("%s failed: %s", func_name, noise_err_to_str(err).c_str());
return api_err;
}
return APIError::OK;
@@ -132,16 +119,26 @@ APIError APINoiseFrameHelper::loop() {
return APIFrameHelper::loop();
}
/** Read a packet into the rx_buf_.
/** Read a packet into the rx_buf_. If successful, stores frame data in the frame parameter
*
* @return APIError::OK if a full packet is in rx_buf_
* @param frame: The struct to hold the frame information in.
* msg_start: points to the start of the payload - this pointer is only valid until the next
* try_receive_raw_ call
*
* @return 0 if a full packet is in rx_buf_
* @return -1 if error, check errno.
*
* errno EWOULDBLOCK: Packet could not be read without blocking. Try again later.
* errno ENOMEM: Not enough memory for reading packet.
* errno API_ERROR_BAD_INDICATOR: Bad indicator byte at start of frame.
* errno API_ERROR_HANDSHAKE_PACKET_LEN: Packet too big for this phase.
*/
APIError APINoiseFrameHelper::try_read_frame_() {
APIError APINoiseFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
if (frame == nullptr) {
HELPER_LOG("Bad argument for try_read_frame_");
return APIError::BAD_ARG;
}
// read header
if (rx_header_buf_len_ < 3) {
// no header information yet
@@ -168,17 +165,16 @@ APIError APINoiseFrameHelper::try_read_frame_() {
// read body
uint16_t msg_size = (((uint16_t) rx_header_buf_[1]) << 8) | rx_header_buf_[2];
// Check against size limits to prevent OOM: MAX_HANDSHAKE_SIZE for handshake, MAX_MESSAGE_SIZE for data
uint16_t limit = (state_ == State::DATA) ? MAX_MESSAGE_SIZE : MAX_HANDSHAKE_SIZE;
if (msg_size > limit) {
if (state_ != State::DATA && msg_size > 128) {
// for handshake message only permit up to 128 bytes
state_ = State::FAILED;
HELPER_LOG("Bad packet: message size %u exceeds maximum %u", msg_size, limit);
return (state_ == State::DATA) ? APIError::BAD_DATA_PACKET : APIError::BAD_HANDSHAKE_PACKET_LEN;
HELPER_LOG("Bad packet len for handshake: %d", msg_size);
return APIError::BAD_HANDSHAKE_PACKET_LEN;
}
// Reserve space for body
if (this->rx_buf_.size() != msg_size) {
this->rx_buf_.resize(msg_size);
// reserve space for body
if (rx_buf_.size() != msg_size) {
rx_buf_.resize(msg_size);
}
if (rx_buf_len_ < msg_size) {
@@ -196,12 +192,12 @@ APIError APINoiseFrameHelper::try_read_frame_() {
}
}
LOG_PACKET_RECEIVED(this->rx_buf_);
// Clear state for next frame (rx_buf_ still contains data for caller)
this->rx_buf_len_ = 0;
this->rx_header_buf_len_ = 0;
LOG_PACKET_RECEIVED(rx_buf_);
*frame = std::move(rx_buf_);
// consume msg
rx_buf_ = {};
rx_buf_len_ = 0;
rx_header_buf_len_ = 0;
return APIError::OK;
}
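Both variants in this hunk bound the incoming frame length before resizing `rx_buf_`, so a hostile or corrupt length prefix cannot trigger a huge allocation. In the variant that derives `limit` from `state_`, note that `state_` is assigned `FAILED` before the ternary re-reads it, so the `BAD_DATA_PACKET` branch can never be selected; a sketch of the same guard with the error chosen first (the constants are stand-ins):

```cpp
#include <cstdint>
#include <cstdio>

enum class State : uint8_t { HANDSHAKE, DATA, FAILED };
enum class Error : uint8_t { OK, BAD_HANDSHAKE_PACKET_LEN, BAD_DATA_PACKET };

constexpr uint16_t MAX_HANDSHAKE_SIZE = 128;  // handshake frames stay tiny
constexpr uint16_t MAX_MESSAGE_SIZE = 32768;  // data frames capped per platform

Error check_frame_size(State &state, uint16_t msg_size) {
  const uint16_t limit = (state == State::DATA) ? MAX_MESSAGE_SIZE : MAX_HANDSHAKE_SIZE;
  if (msg_size <= limit)
    return Error::OK;
  // Decide which error to report *before* mutating the state it depends on.
  const Error err = (state == State::DATA) ? Error::BAD_DATA_PACKET : Error::BAD_HANDSHAKE_PACKET_LEN;
  state = State::FAILED;
  return err;
}

int main() {
  State s = State::HANDSHAKE;
  printf("%d\n", static_cast<int>(check_frame_size(s, 4096)));  // rejected: over the 128-byte handshake limit
  return 0;
}
```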
@@ -223,17 +219,18 @@ APIError APINoiseFrameHelper::state_action_() {
}
if (state_ == State::CLIENT_HELLO) {
// waiting for client hello
aerr = this->try_read_frame_();
std::vector<uint8_t> frame;
aerr = try_read_frame_(&frame);
if (aerr != APIError::OK) {
return handle_handshake_frame_error_(aerr);
}
// ignore contents, may be used in future for flags
// Resize for: existing prologue + 2 size bytes + frame data
size_t old_size = this->prologue_.size();
this->prologue_.resize(old_size + 2 + this->rx_buf_.size());
this->prologue_[old_size] = (uint8_t) (this->rx_buf_.size() >> 8);
this->prologue_[old_size + 1] = (uint8_t) this->rx_buf_.size();
std::memcpy(this->prologue_.data() + old_size + 2, this->rx_buf_.data(), this->rx_buf_.size());
size_t old_size = prologue_.size();
prologue_.resize(old_size + 2 + frame.size());
prologue_[old_size] = (uint8_t) (frame.size() >> 8);
prologue_[old_size + 1] = (uint8_t) frame.size();
std::memcpy(prologue_.data() + old_size + 2, frame.data(), frame.size());
state_ = State::SERVER_HELLO;
}
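Both sides of this hunk append the client-hello frame to the Noise prologue with a 2-byte big-endian length prefix in front of the payload. A small standalone helper doing the same framing; the function name is illustrative:

```cpp
#include <cstdint>
#include <cstring>
#include <vector>

// Append `frame` to `prologue`, preceded by its length as a 16-bit big-endian prefix.
void append_length_prefixed(std::vector<uint8_t> &prologue, const std::vector<uint8_t> &frame) {
  const size_t old_size = prologue.size();
  prologue.resize(old_size + 2 + frame.size());
  prologue[old_size] = static_cast<uint8_t>(frame.size() >> 8);  // high byte first
  prologue[old_size + 1] = static_cast<uint8_t>(frame.size());   // then low byte
  std::memcpy(prologue.data() + old_size + 2, frame.data(), frame.size());
}

int main() {
  std::vector<uint8_t> prologue = {'N', 'o', 'i', 's', 'e'};
  append_length_prefixed(prologue, {0x01, 0x02, 0x03});
  return prologue.size() == 5 + 2 + 3 ? 0 : 1;
}
```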
@@ -242,6 +239,7 @@ APIError APINoiseFrameHelper::state_action_() {
const std::string &name = App.get_name();
const std::string &mac = get_mac_address();
std::vector<uint8_t> msg;
// Calculate positions and sizes
size_t name_len = name.size() + 1; // including null terminator
size_t mac_len = mac.size() + 1; // including null terminator
@@ -249,17 +247,17 @@ APIError APINoiseFrameHelper::state_action_() {
size_t mac_offset = name_offset + name_len;
size_t total_size = 1 + name_len + mac_len;
auto msg = std::make_unique<uint8_t[]>(total_size);
msg.resize(total_size);
// chosen proto
msg[0] = 0x01;
// node name, terminated by null byte
std::memcpy(msg.get() + name_offset, name.c_str(), name_len);
std::memcpy(msg.data() + name_offset, name.c_str(), name_len);
// node mac, terminated by null byte
std::memcpy(msg.get() + mac_offset, mac.c_str(), mac_len);
std::memcpy(msg.data() + mac_offset, mac.c_str(), mac_len);
aerr = write_frame_(msg.get(), total_size);
aerr = write_frame_(msg.data(), msg.size());
if (aerr != APIError::OK)
return aerr;
@@ -274,30 +272,29 @@ APIError APINoiseFrameHelper::state_action_() {
int action = noise_handshakestate_get_action(handshake_);
if (action == NOISE_ACTION_READ_MESSAGE) {
// waiting for handshake msg
aerr = this->try_read_frame_();
std::vector<uint8_t> frame;
aerr = try_read_frame_(&frame);
if (aerr != APIError::OK) {
return handle_handshake_frame_error_(aerr);
}
if (this->rx_buf_.empty()) {
send_explicit_handshake_reject_(LOG_STR("Empty handshake message"));
if (frame.empty()) {
send_explicit_handshake_reject_("Empty handshake message");
return APIError::BAD_HANDSHAKE_ERROR_BYTE;
} else if (this->rx_buf_[0] != 0x00) {
HELPER_LOG("Bad handshake error byte: %u", this->rx_buf_[0]);
send_explicit_handshake_reject_(LOG_STR("Bad handshake error byte"));
} else if (frame[0] != 0x00) {
HELPER_LOG("Bad handshake error byte: %u", frame[0]);
send_explicit_handshake_reject_("Bad handshake error byte");
return APIError::BAD_HANDSHAKE_ERROR_BYTE;
}
NoiseBuffer mbuf;
noise_buffer_init(mbuf);
noise_buffer_set_input(mbuf, this->rx_buf_.data() + 1, this->rx_buf_.size() - 1);
noise_buffer_set_input(mbuf, frame.data() + 1, frame.size() - 1);
err = noise_handshakestate_read_message(handshake_, &mbuf, nullptr);
if (err != 0) {
// Special handling for MAC failure
send_explicit_handshake_reject_(err == NOISE_ERROR_MAC_FAILURE ? LOG_STR("Handshake MAC failure")
: LOG_STR("Handshake error"));
return handle_noise_error_(err, LOG_STR("noise_handshakestate_read_message"),
APIError::HANDSHAKESTATE_READ_FAILED);
send_explicit_handshake_reject_(err == NOISE_ERROR_MAC_FAILURE ? "Handshake MAC failure" : "Handshake error");
return handle_noise_error_(err, "noise_handshakestate_read_message", APIError::HANDSHAKESTATE_READ_FAILED);
}
aerr = check_handshake_finished_();
@@ -310,8 +307,8 @@ APIError APINoiseFrameHelper::state_action_() {
noise_buffer_set_output(mbuf, buffer + 1, sizeof(buffer) - 1);
err = noise_handshakestate_write_message(handshake_, &mbuf, nullptr);
APIError aerr_write = handle_noise_error_(err, LOG_STR("noise_handshakestate_write_message"),
APIError::HANDSHAKESTATE_WRITE_FAILED);
APIError aerr_write =
handle_noise_error_(err, "noise_handshakestate_write_message", APIError::HANDSHAKESTATE_WRITE_FAILED);
if (aerr_write != APIError::OK)
return aerr_write;
buffer[0] = 0x00; // success
@@ -334,66 +331,51 @@ APIError APINoiseFrameHelper::state_action_() {
}
return APIError::OK;
}
void APINoiseFrameHelper::send_explicit_handshake_reject_(const LogString *reason) {
#ifdef USE_STORE_LOG_STR_IN_FLASH
// On ESP8266 with flash strings, we need to use PROGMEM-aware functions
size_t reason_len = strlen_P(reinterpret_cast<PGM_P>(reason));
size_t data_size = reason_len + 1;
auto data = std::make_unique<uint8_t[]>(data_size);
data[0] = 0x01; // failure
// Copy error message from PROGMEM
if (reason_len > 0) {
memcpy_P(data.get() + 1, reinterpret_cast<PGM_P>(reason), reason_len);
}
#else
// Normal memory access
const char *reason_str = LOG_STR_ARG(reason);
size_t reason_len = strlen(reason_str);
size_t data_size = reason_len + 1;
auto data = std::make_unique<uint8_t[]>(data_size);
void APINoiseFrameHelper::send_explicit_handshake_reject_(const std::string &reason) {
std::vector<uint8_t> data;
data.resize(reason.length() + 1);
data[0] = 0x01; // failure
// Copy error message in bulk
if (reason_len > 0) {
std::memcpy(data.get() + 1, reason_str, reason_len);
if (!reason.empty()) {
std::memcpy(data.data() + 1, reason.c_str(), reason.length());
}
#endif
// temporarily remove failed state
auto orig_state = state_;
state_ = State::EXPLICIT_REJECT;
write_frame_(data.get(), data_size);
write_frame_(data.data(), data.size());
state_ = orig_state;
}
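
The reject frame assembled above is one failure byte (0x01) followed by the ASCII reason with no terminator. A sketch of just the payload construction, with an illustrative helper name:

```cpp
// Minimal sketch; the helper name is illustrative.
#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

std::vector<uint8_t> build_handshake_reject(const std::string &reason) {
  std::vector<uint8_t> data(reason.size() + 1);
  data[0] = 0x01;  // failure marker (a leading 0x00 signals success)
  if (!reason.empty()) {
    std::memcpy(data.data() + 1, reason.data(), reason.size());  // reason text, not null-terminated
  }
  return data;
}
```
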
APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {
APIError aerr = this->state_action_();
int err;
APIError aerr;
aerr = state_action_();
if (aerr != APIError::OK) {
return aerr;
}
if (this->state_ != State::DATA) {
if (state_ != State::DATA) {
return APIError::WOULD_BLOCK;
}
aerr = this->try_read_frame_();
std::vector<uint8_t> frame;
aerr = try_read_frame_(&frame);
if (aerr != APIError::OK)
return aerr;
NoiseBuffer mbuf;
noise_buffer_init(mbuf);
noise_buffer_set_inout(mbuf, this->rx_buf_.data(), this->rx_buf_.size(), this->rx_buf_.size());
int err = noise_cipherstate_decrypt(this->recv_cipher_, &mbuf);
APIError decrypt_err =
handle_noise_error_(err, LOG_STR("noise_cipherstate_decrypt"), APIError::CIPHERSTATE_DECRYPT_FAILED);
if (decrypt_err != APIError::OK) {
noise_buffer_set_inout(mbuf, frame.data(), frame.size(), frame.size());
err = noise_cipherstate_decrypt(recv_cipher_, &mbuf);
APIError decrypt_err = handle_noise_error_(err, "noise_cipherstate_decrypt", APIError::CIPHERSTATE_DECRYPT_FAILED);
if (decrypt_err != APIError::OK)
return decrypt_err;
}
uint16_t msg_size = mbuf.size;
uint8_t *msg_data = this->rx_buf_.data();
uint8_t *msg_data = frame.data();
if (msg_size < 4) {
this->state_ = State::FAILED;
state_ = State::FAILED;
HELPER_LOG("Bad data packet: size %d too short", msg_size);
return APIError::BAD_DATA_PACKET;
}
@@ -401,12 +383,12 @@ APIError APINoiseFrameHelper::read_packet(ReadPacketBuffer *buffer) {
uint16_t type = (((uint16_t) msg_data[0]) << 8) | msg_data[1];
uint16_t data_len = (((uint16_t) msg_data[2]) << 8) | msg_data[3];
if (data_len > msg_size - 4) {
this->state_ = State::FAILED;
state_ = State::FAILED;
HELPER_LOG("Bad data packet: data_len %u greater than msg_size %u", data_len, msg_size);
return APIError::BAD_DATA_PACKET;
}
buffer->container = std::move(this->rx_buf_);
buffer->container = std::move(frame);
buffer->data_offset = 4;
buffer->data_len = data_len;
buffer->type = type;
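
Once decrypted, the frame parsed above carries a 4-byte header: a big-endian message type and a big-endian payload length, followed by the payload itself. A small sketch of that header check, with illustrative struct and function names:

```cpp
// Minimal sketch of the header check; names are illustrative.
#include <cstddef>
#include <cstdint>

struct ParsedHeader {
  uint16_t type;
  uint16_t data_len;
  bool ok;
};

ParsedHeader parse_decrypted_header(const uint8_t *msg, size_t msg_size) {
  if (msg_size < 4)
    return {0, 0, false};  // too short to hold type + length
  uint16_t type = static_cast<uint16_t>((msg[0] << 8) | msg[1]);
  uint16_t data_len = static_cast<uint16_t>((msg[2] << 8) | msg[3]);
  if (data_len > msg_size - 4)
    return {0, 0, false};  // declared length exceeds the decrypted payload
  return {type, data_len, true};  // payload begins at offset 4
}
```
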
@@ -468,8 +450,7 @@ APIError APINoiseFrameHelper::write_protobuf_packets(ProtoWriteBuffer buffer, st
4 + packet.payload_size + frame_footer_size_);
int err = noise_cipherstate_encrypt(send_cipher_, &mbuf);
APIError aerr =
handle_noise_error_(err, LOG_STR("noise_cipherstate_encrypt"), APIError::CIPHERSTATE_ENCRYPT_FAILED);
APIError aerr = handle_noise_error_(err, "noise_cipherstate_encrypt", APIError::CIPHERSTATE_ENCRYPT_FAILED);
if (aerr != APIError::OK)
return aerr;
@@ -523,27 +504,25 @@ APIError APINoiseFrameHelper::init_handshake_() {
nid_.modifier_ids[0] = NOISE_MODIFIER_PSK0;
err = noise_handshakestate_new_by_id(&handshake_, &nid_, NOISE_ROLE_RESPONDER);
APIError aerr =
handle_noise_error_(err, LOG_STR("noise_handshakestate_new_by_id"), APIError::HANDSHAKESTATE_SETUP_FAILED);
APIError aerr = handle_noise_error_(err, "noise_handshakestate_new_by_id", APIError::HANDSHAKESTATE_SETUP_FAILED);
if (aerr != APIError::OK)
return aerr;
const auto &psk = ctx_->get_psk();
err = noise_handshakestate_set_pre_shared_key(handshake_, psk.data(), psk.size());
aerr = handle_noise_error_(err, LOG_STR("noise_handshakestate_set_pre_shared_key"),
APIError::HANDSHAKESTATE_SETUP_FAILED);
aerr = handle_noise_error_(err, "noise_handshakestate_set_pre_shared_key", APIError::HANDSHAKESTATE_SETUP_FAILED);
if (aerr != APIError::OK)
return aerr;
err = noise_handshakestate_set_prologue(handshake_, prologue_.data(), prologue_.size());
aerr = handle_noise_error_(err, LOG_STR("noise_handshakestate_set_prologue"), APIError::HANDSHAKESTATE_SETUP_FAILED);
aerr = handle_noise_error_(err, "noise_handshakestate_set_prologue", APIError::HANDSHAKESTATE_SETUP_FAILED);
if (aerr != APIError::OK)
return aerr;
// set_prologue copies it into handshakestate, so we can get rid of it now
prologue_ = {};
err = noise_handshakestate_start(handshake_);
aerr = handle_noise_error_(err, LOG_STR("noise_handshakestate_start"), APIError::HANDSHAKESTATE_SETUP_FAILED);
aerr = handle_noise_error_(err, "noise_handshakestate_start", APIError::HANDSHAKESTATE_SETUP_FAILED);
if (aerr != APIError::OK)
return aerr;
return APIError::OK;
@@ -561,8 +540,7 @@ APIError APINoiseFrameHelper::check_handshake_finished_() {
return APIError::HANDSHAKESTATE_BAD_STATE;
}
int err = noise_handshakestate_split(handshake_, &send_cipher_, &recv_cipher_);
APIError aerr =
handle_noise_error_(err, LOG_STR("noise_handshakestate_split"), APIError::HANDSHAKESTATE_SPLIT_FAILED);
APIError aerr = handle_noise_error_(err, "noise_handshakestate_split", APIError::HANDSHAKESTATE_SPLIT_FAILED);
if (aerr != APIError::OK)
return aerr;

View File

@@ -7,7 +7,7 @@
namespace esphome::api {
class APINoiseFrameHelper final : public APIFrameHelper {
class APINoiseFrameHelper : public APIFrameHelper {
public:
APINoiseFrameHelper(std::unique_ptr<socket::Socket> socket, std::shared_ptr<APINoiseContext> ctx,
const ClientInfo *client_info)
@@ -25,16 +25,20 @@ class APINoiseFrameHelper final : public APIFrameHelper {
APIError read_packet(ReadPacketBuffer *buffer) override;
APIError write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) override;
APIError write_protobuf_packets(ProtoWriteBuffer buffer, std::span<const PacketInfo> packets) override;
// Get the frame header padding required by this protocol
uint8_t frame_header_padding() override { return frame_header_padding_; }
// Get the frame footer size required by this protocol
uint8_t frame_footer_size() override { return frame_footer_size_; }
protected:
APIError state_action_();
APIError try_read_frame_();
APIError try_read_frame_(std::vector<uint8_t> *frame);
APIError write_frame_(const uint8_t *data, uint16_t len);
APIError init_handshake_();
APIError check_handshake_finished_();
void send_explicit_handshake_reject_(const LogString *reason);
void send_explicit_handshake_reject_(const std::string &reason);
APIError handle_handshake_frame_error_(APIError aerr);
APIError handle_noise_error_(int err, const LogString *func_name, APIError api_err);
APIError handle_noise_error_(int err, const char *func_name, APIError api_err);
// Pointers first (4 bytes each)
NoiseHandshakeState *handshake_{nullptr};

View File

@@ -10,16 +10,11 @@
#include <cstring>
#include <cinttypes>
#ifdef USE_ESP8266
#include <pgmspace.h>
#endif
namespace esphome::api {
static const char *const TAG = "api.plaintext";
#define HELPER_LOG(msg, ...) \
ESP_LOGVV(TAG, "%s (%s): " msg, this->client_info_->name.c_str(), this->client_info_->peername.c_str(), ##__VA_ARGS__)
#define HELPER_LOG(msg, ...) ESP_LOGVV(TAG, "%s: " msg, this->client_info_->get_combined_info().c_str(), ##__VA_ARGS__)
#ifdef HELPER_LOG_PACKETS
#define LOG_PACKET_RECEIVED(buffer) ESP_LOGVV(TAG, "Received frame: %s", format_hex_pretty(buffer).c_str())
@@ -47,13 +42,21 @@ APIError APIPlaintextFrameHelper::loop() {
return APIFrameHelper::loop();
}
/** Read a packet into the rx_buf_.
/** Read a packet into the rx_buf_. If successful, stores frame data in the frame parameter
*
* @param frame: The struct to hold the frame information in.
* msg: store the parsed frame in that struct
*
* @return See APIError
*
* error API_ERROR_BAD_INDICATOR: Bad indicator byte at start of frame.
*/
APIError APIPlaintextFrameHelper::try_read_frame_() {
APIError APIPlaintextFrameHelper::try_read_frame_(std::vector<uint8_t> *frame) {
if (frame == nullptr) {
HELPER_LOG("Bad argument for try_read_frame_");
return APIError::BAD_ARG;
}
// read header
while (!rx_header_parsed_) {
// Now that we know when the socket is ready, we can read up to 3 bytes
@@ -115,10 +118,10 @@ APIError APIPlaintextFrameHelper::try_read_frame_() {
continue;
}
if (msg_size_varint->as_uint32() > MAX_MESSAGE_SIZE) {
if (msg_size_varint->as_uint32() > std::numeric_limits<uint16_t>::max()) {
state_ = State::FAILED;
HELPER_LOG("Bad packet: message size %" PRIu32 " exceeds maximum %u", msg_size_varint->as_uint32(),
MAX_MESSAGE_SIZE);
std::numeric_limits<uint16_t>::max());
return APIError::BAD_DATA_PACKET;
}
rx_header_parsed_len_ = msg_size_varint->as_uint16();
@@ -142,9 +145,9 @@ APIError APIPlaintextFrameHelper::try_read_frame_() {
}
// header reading done
// Reserve space for body
if (this->rx_buf_.size() != this->rx_header_parsed_len_) {
this->rx_buf_.resize(this->rx_header_parsed_len_);
// reserve space for body
if (rx_buf_.size() != rx_header_parsed_len_) {
rx_buf_.resize(rx_header_parsed_len_);
}
if (rx_buf_len_ < rx_header_parsed_len_) {
@@ -162,22 +165,24 @@ APIError APIPlaintextFrameHelper::try_read_frame_() {
}
}
LOG_PACKET_RECEIVED(this->rx_buf_);
// Clear state for next frame (rx_buf_ still contains data for caller)
this->rx_buf_len_ = 0;
this->rx_header_buf_pos_ = 0;
this->rx_header_parsed_ = false;
LOG_PACKET_RECEIVED(rx_buf_);
*frame = std::move(rx_buf_);
// consume msg
rx_buf_ = {};
rx_buf_len_ = 0;
rx_header_buf_pos_ = 0;
rx_header_parsed_ = false;
return APIError::OK;
}
APIError APIPlaintextFrameHelper::read_packet(ReadPacketBuffer *buffer) {
if (this->state_ != State::DATA) {
APIError aerr;
if (state_ != State::DATA) {
return APIError::WOULD_BLOCK;
}
APIError aerr = this->try_read_frame_();
std::vector<uint8_t> frame;
aerr = try_read_frame_(&frame);
if (aerr != APIError::OK) {
if (aerr == APIError::BAD_INDICATOR) {
// Make sure to tell the remote that we don't
@@ -192,28 +197,19 @@ APIError APIPlaintextFrameHelper::read_packet(ReadPacketBuffer *buffer) {
// We must send at least 3 bytes to be read, so we add
// a message after the indicator byte to ensure it's long
// enough and can aid in debugging.
static constexpr uint8_t INDICATOR_MSG_SIZE = 19;
#ifdef USE_ESP8266
static const char MSG_PROGMEM[] PROGMEM = "\x00"
"Bad indicator byte";
char msg[INDICATOR_MSG_SIZE];
memcpy_P(msg, MSG_PROGMEM, INDICATOR_MSG_SIZE);
const char msg[] = "\x00"
"Bad indicator byte";
iov[0].iov_base = (void *) msg;
#else
static const char MSG[] = "\x00"
"Bad indicator byte";
iov[0].iov_base = (void *) MSG;
#endif
iov[0].iov_len = INDICATOR_MSG_SIZE;
this->write_raw_(iov, 1, INDICATOR_MSG_SIZE);
iov[0].iov_len = 19;
this->write_raw_(iov, 1, 19);
}
return aerr;
}
buffer->container = std::move(this->rx_buf_);
buffer->container = std::move(frame);
buffer->data_offset = 0;
buffer->data_len = this->rx_header_parsed_len_;
buffer->type = this->rx_header_parsed_type_;
buffer->data_len = rx_header_parsed_len_;
buffer->type = rx_header_parsed_type_;
return APIError::OK;
}
APIError APIPlaintextFrameHelper::write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) {
@@ -239,8 +235,8 @@ APIError APIPlaintextFrameHelper::write_protobuf_packets(ProtoWriteBuffer buffer
for (const auto &packet : packets) {
// Calculate varint sizes for header layout
uint8_t size_varint_len = api::ProtoSize::varint(static_cast<uint32_t>(packet.payload_size));
uint8_t type_varint_len = api::ProtoSize::varint(static_cast<uint32_t>(packet.message_type));
uint8_t size_varint_len = api::ProtoSize::varint(packet.payload_size);
uint8_t type_varint_len = api::ProtoSize::varint(packet.message_type);
uint8_t total_header_len = 1 + size_varint_len + type_varint_len;
// Calculate where to start writing the header
@@ -275,9 +271,8 @@ APIError APIPlaintextFrameHelper::write_protobuf_packets(ProtoWriteBuffer buffer
buf_start[header_offset] = 0x00; // indicator
// Encode varints directly into buffer
ProtoVarInt(packet.payload_size).encode_to_buffer_unchecked(buf_start + header_offset + 1, size_varint_len);
ProtoVarInt(packet.message_type)
.encode_to_buffer_unchecked(buf_start + header_offset + 1 + size_varint_len, type_varint_len);
encode_varint_unchecked(buf_start + header_offset + 1, packet.payload_size);
encode_varint_unchecked(buf_start + header_offset + 1 + size_varint_len, packet.message_type);
// Add iovec for this packet (header + payload)
size_t packet_len = static_cast<size_t>(total_header_len + packet.payload_size);

View File

@@ -5,7 +5,7 @@
namespace esphome::api {
class APIPlaintextFrameHelper final : public APIFrameHelper {
class APIPlaintextFrameHelper : public APIFrameHelper {
public:
APIPlaintextFrameHelper(std::unique_ptr<socket::Socket> socket, const ClientInfo *client_info)
: APIFrameHelper(std::move(socket), client_info) {
@@ -22,9 +22,12 @@ class APIPlaintextFrameHelper final : public APIFrameHelper {
APIError read_packet(ReadPacketBuffer *buffer) override;
APIError write_protobuf_packet(uint8_t type, ProtoWriteBuffer buffer) override;
APIError write_protobuf_packets(ProtoWriteBuffer buffer, std::span<const PacketInfo> packets) override;
uint8_t frame_header_padding() override { return frame_header_padding_; }
// Get the frame footer size required by this protocol
uint8_t frame_footer_size() override { return frame_footer_size_; }
protected:
APIError try_read_frame_();
APIError try_read_frame_(std::vector<uint8_t> *frame);
// Group 2-byte aligned types
uint16_t rx_header_parsed_type_ = 0;

View File

@@ -32,13 +32,6 @@ extend google.protobuf.FieldOptions {
optional string fixed_array_size_define = 50010;
optional string fixed_array_with_length_define = 50011;
// pointer_to_buffer: Use pointer instead of array for fixed-size byte fields
// When set, the field will be declared as a pointer (const uint8_t *data)
// instead of an array (uint8_t data[N]). This allows zero-copy on decode
// by pointing directly to the protobuf buffer. The buffer must remain valid
// until the message is processed (which is guaranteed for stack-allocated messages).
optional bool pointer_to_buffer = 50012 [default=false];
// container_pointer: Zero-copy optimization for repeated fields.
//
// When container_pointer is set on a repeated field, the generated message will
@@ -64,10 +57,4 @@ extend google.protobuf.FieldOptions {
// This is typically done through methods returning const T& or special accessor
// methods like get_options() or supported_modes_for_api_().
optional string container_pointer = 50001;
// fixed_vector: Use FixedVector instead of std::vector for repeated fields
// When set, the repeated field will use FixedVector<T> which requires calling
// init(size) before adding elements. This eliminates std::vector template overhead
// and is ideal when the exact size is known before populating the array.
optional bool fixed_vector = 50013 [default=false];
}
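
As the option comments above describe, `pointer_to_buffer` changes only how the generated bytes field is declared: a copied array versus a pointer/length pair into the receive buffer. A hypothetical sketch of the two generated shapes (struct and field names are invented for illustration; only the pointer-vs-array contrast comes from the comments above):

```cpp
// Hypothetical sketch of the two generated field shapes.
#include <cstddef>
#include <cstdint>

struct CopiedBytesField {
  uint8_t data[16];  // fixed-size array: bytes are copied out of the buffer on decode
  size_t data_len{0};
};

struct PointerBytesField {
  const uint8_t *data{nullptr};  // points directly into the protobuf buffer (zero-copy)
  size_t data_len{0};            // valid only while that buffer is still alive
};
```

The zero-copy form trades ownership for lifetime: as the comment notes, the receive buffer must stay valid until the message is processed, which holds for stack-allocated messages decoded and handled in place.
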

View File

@@ -22,12 +22,9 @@ bool HelloRequest::decode_varint(uint32_t field_id, ProtoVarInt value) {
}
bool HelloRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 1: {
// Use raw data directly to avoid allocation
this->client_info = value.data();
this->client_info_len = value.size();
case 1:
this->client_info = value.as_string();
break;
}
default:
return false;
}
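
The interleaved case-1 bodies above show the two decode styles for a length-delimited field: an allocating `as_string()` copy versus keeping a raw pointer and length into the receive buffer. A simplified sketch of the trade-off, using a stand-in view type whose API may differ from the real ProtoLengthDelimited:

```cpp
// Simplified sketch; LengthDelimitedView stands in for the real
// ProtoLengthDelimited type, whose API may differ in detail.
#include <cstddef>
#include <cstdint>
#include <string>

struct LengthDelimitedView {
  const uint8_t *ptr;
  size_t len;
  const uint8_t *data() const { return ptr; }
  size_t size() const { return len; }
  std::string as_string() const { return std::string(reinterpret_cast<const char *>(ptr), len); }
};

struct CopyingDecode {
  std::string client_info;  // owns a heap copy of the field
  void decode_field(const LengthDelimitedView &v) { client_info = v.as_string(); }
};

struct ZeroCopyDecode {
  const uint8_t *client_info{nullptr};  // borrows from the receive buffer
  size_t client_info_len{0};            // valid only while that buffer is alive
  void decode_field(const LengthDelimitedView &v) {
    client_info = v.data();
    client_info_len = v.size();
  }
};
```
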
@@ -45,23 +42,18 @@ void HelloResponse::calculate_size(ProtoSize &size) const {
size.add_length(1, this->server_info_ref_.size());
size.add_length(1, this->name_ref_.size());
}
#ifdef USE_API_PASSWORD
bool AuthenticationRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
bool ConnectRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 1: {
// Use raw data directly to avoid allocation
this->password = value.data();
this->password_len = value.size();
case 1:
this->password = value.as_string();
break;
}
default:
return false;
}
return true;
}
void AuthenticationResponse::encode(ProtoWriteBuffer buffer) const { buffer.encode_bool(1, this->invalid_password); }
void AuthenticationResponse::calculate_size(ProtoSize &size) const { size.add_bool(1, this->invalid_password); }
#endif
void ConnectResponse::encode(ProtoWriteBuffer buffer) const { buffer.encode_bool(1, this->invalid_password); }
void ConnectResponse::calculate_size(ProtoSize &size) const { size.add_bool(1, this->invalid_password); }
#ifdef USE_AREAS
void AreaInfo::encode(ProtoWriteBuffer buffer) const {
buffer.encode_uint32(1, this->area_id);
@@ -135,12 +127,6 @@ void DeviceInfoResponse::encode(ProtoWriteBuffer buffer) const {
#ifdef USE_AREAS
buffer.encode_message(22, this->area);
#endif
#ifdef USE_ZWAVE_PROXY
buffer.encode_uint32(23, this->zwave_proxy_feature_flags);
#endif
#ifdef USE_ZWAVE_PROXY
buffer.encode_uint32(24, this->zwave_home_id);
#endif
}
void DeviceInfoResponse::calculate_size(ProtoSize &size) const {
#ifdef USE_API_PASSWORD
@@ -193,12 +179,6 @@ void DeviceInfoResponse::calculate_size(ProtoSize &size) const {
#ifdef USE_AREAS
size.add_message_object(2, this->area);
#endif
#ifdef USE_ZWAVE_PROXY
size.add_uint32(2, this->zwave_proxy_feature_flags);
#endif
#ifdef USE_ZWAVE_PROXY
size.add_uint32(2, this->zwave_home_id);
#endif
}
#ifdef USE_BINARY_SENSOR
void ListEntitiesBinarySensorResponse::encode(ProtoWriteBuffer buffer) const {
@@ -872,7 +852,7 @@ void HomeassistantServiceMap::calculate_size(ProtoSize &size) const {
size.add_length(1, this->key_ref_.size());
size.add_length(1, this->value.size());
}
void HomeassistantActionRequest::encode(ProtoWriteBuffer buffer) const {
void HomeassistantServiceResponse::encode(ProtoWriteBuffer buffer) const {
buffer.encode_string(1, this->service_ref_);
for (auto &it : this->data) {
buffer.encode_message(2, it, true);
@@ -884,64 +864,13 @@ void HomeassistantActionRequest::encode(ProtoWriteBuffer buffer) const {
buffer.encode_message(4, it, true);
}
buffer.encode_bool(5, this->is_event);
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
buffer.encode_uint32(6, this->call_id);
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
buffer.encode_bool(7, this->wants_response);
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
buffer.encode_string(8, this->response_template);
#endif
}
void HomeassistantActionRequest::calculate_size(ProtoSize &size) const {
void HomeassistantServiceResponse::calculate_size(ProtoSize &size) const {
size.add_length(1, this->service_ref_.size());
size.add_repeated_message(1, this->data);
size.add_repeated_message(1, this->data_template);
size.add_repeated_message(1, this->variables);
size.add_bool(1, this->is_event);
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
size.add_uint32(1, this->call_id);
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
size.add_bool(1, this->wants_response);
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
size.add_length(1, this->response_template.size());
#endif
}
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
bool HomeassistantActionResponse::decode_varint(uint32_t field_id, ProtoVarInt value) {
switch (field_id) {
case 1:
this->call_id = value.as_uint32();
break;
case 2:
this->success = value.as_bool();
break;
default:
return false;
}
return true;
}
bool HomeassistantActionResponse::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 3:
this->error_message = value.as_string();
break;
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
case 4: {
// Use raw data directly to avoid allocation
this->response_data = value.data();
this->response_data_len = value.size();
break;
}
#endif
default:
return false;
}
return true;
}
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
@@ -972,19 +901,6 @@ bool HomeAssistantStateResponse::decode_length(uint32_t field_id, ProtoLengthDel
return true;
}
#endif
bool GetTimeResponse::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 2: {
// Use raw data directly to avoid allocation
this->timezone = value.data();
this->timezone_len = value.size();
break;
}
default:
return false;
}
return true;
}
bool GetTimeResponse::decode_32bit(uint32_t field_id, Proto32Bit value) {
switch (field_id) {
case 1:
@@ -995,6 +911,8 @@ bool GetTimeResponse::decode_32bit(uint32_t field_id, Proto32Bit value) {
}
return true;
}
void GetTimeResponse::encode(ProtoWriteBuffer buffer) const { buffer.encode_fixed32(1, this->epoch_seconds); }
void GetTimeResponse::calculate_size(ProtoSize &size) const { size.add_fixed32(1, this->epoch_seconds); }
#ifdef USE_API_SERVICES
void ListEntitiesServicesArgument::encode(ProtoWriteBuffer buffer) const {
buffer.encode_string(1, this->name_ref_);
@@ -1064,17 +982,6 @@ bool ExecuteServiceArgument::decode_32bit(uint32_t field_id, Proto32Bit value) {
}
return true;
}
void ExecuteServiceArgument::decode(const uint8_t *buffer, size_t length) {
uint32_t count_bool_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 6);
this->bool_array.init(count_bool_array);
uint32_t count_int_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 7);
this->int_array.init(count_int_array);
uint32_t count_float_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 8);
this->float_array.init(count_float_array);
uint32_t count_string_array = ProtoDecodableMessage::count_repeated_field(buffer, length, 9);
this->string_array.init(count_string_array);
ProtoDecodableMessage::decode(buffer, length);
}
bool ExecuteServiceRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 2:
@@ -1096,11 +1003,6 @@ bool ExecuteServiceRequest::decode_32bit(uint32_t field_id, Proto32Bit value) {
}
return true;
}
void ExecuteServiceRequest::decode(const uint8_t *buffer, size_t length) {
uint32_t count_args = ProtoDecodableMessage::count_repeated_field(buffer, length, 2);
this->args.init(count_args);
ProtoDecodableMessage::decode(buffer, length);
}
#endif
#ifdef USE_CAMERA
void ListEntitiesCameraResponse::encode(ProtoWriteBuffer buffer) const {
@@ -1201,7 +1103,6 @@ void ListEntitiesClimateResponse::encode(ProtoWriteBuffer buffer) const {
#ifdef USE_DEVICES
buffer.encode_uint32(26, this->device_id);
#endif
buffer.encode_uint32(27, this->feature_flags);
}
void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const {
size.add_length(1, this->object_id_ref_.size());
@@ -1256,7 +1157,6 @@ void ListEntitiesClimateResponse::calculate_size(ProtoSize &size) const {
#ifdef USE_DEVICES
size.add_uint32(2, this->device_id);
#endif
size.add_uint32(2, this->feature_flags);
}
void ClimateStateResponse::encode(ProtoWriteBuffer buffer) const {
buffer.encode_fixed32(1, this->key);
@@ -2106,12 +2006,9 @@ bool BluetoothGATTWriteRequest::decode_varint(uint32_t field_id, ProtoVarInt val
}
bool BluetoothGATTWriteRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 4: {
// Use raw data directly to avoid allocation
this->data = value.data();
this->data_len = value.size();
case 4:
this->data = value.as_string();
break;
}
default:
return false;
}
@@ -2145,12 +2042,9 @@ bool BluetoothGATTWriteDescriptorRequest::decode_varint(uint32_t field_id, Proto
}
bool BluetoothGATTWriteDescriptorRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 3: {
// Use raw data directly to avoid allocation
this->data = value.data();
this->data_len = value.size();
case 3:
this->data = value.as_string();
break;
}
default:
return false;
}
@@ -2259,12 +2153,10 @@ void BluetoothDeviceClearCacheResponse::calculate_size(ProtoSize &size) const {
void BluetoothScannerStateResponse::encode(ProtoWriteBuffer buffer) const {
buffer.encode_uint32(1, static_cast<uint32_t>(this->state));
buffer.encode_uint32(2, static_cast<uint32_t>(this->mode));
buffer.encode_uint32(3, static_cast<uint32_t>(this->configured_mode));
}
void BluetoothScannerStateResponse::calculate_size(ProtoSize &size) const {
size.add_uint32(1, static_cast<uint32_t>(this->state));
size.add_uint32(1, static_cast<uint32_t>(this->mode));
size.add_uint32(1, static_cast<uint32_t>(this->configured_mode));
}
bool BluetoothScannerSetModeRequest::decode_varint(uint32_t field_id, ProtoVarInt value) {
switch (field_id) {
@@ -2466,52 +2358,6 @@ void VoiceAssistantWakeWord::calculate_size(ProtoSize &size) const {
}
}
}
bool VoiceAssistantExternalWakeWord::decode_varint(uint32_t field_id, ProtoVarInt value) {
switch (field_id) {
case 5:
this->model_size = value.as_uint32();
break;
default:
return false;
}
return true;
}
bool VoiceAssistantExternalWakeWord::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 1:
this->id = value.as_string();
break;
case 2:
this->wake_word = value.as_string();
break;
case 3:
this->trained_languages.push_back(value.as_string());
break;
case 4:
this->model_type = value.as_string();
break;
case 6:
this->model_hash = value.as_string();
break;
case 7:
this->url = value.as_string();
break;
default:
return false;
}
return true;
}
bool VoiceAssistantConfigurationRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 1:
this->external_wake_words.emplace_back();
value.decode_to_message(this->external_wake_words.back());
break;
default:
return false;
}
return true;
}
void VoiceAssistantConfigurationResponse::encode(ProtoWriteBuffer buffer) const {
for (auto &it : this->available_wake_words) {
buffer.encode_message(1, it, true);
@@ -3155,53 +3001,5 @@ bool UpdateCommandRequest::decode_32bit(uint32_t field_id, Proto32Bit value) {
return true;
}
#endif
#ifdef USE_ZWAVE_PROXY
bool ZWaveProxyFrame::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 1: {
// Use raw data directly to avoid allocation
this->data = value.data();
this->data_len = value.size();
break;
}
default:
return false;
}
return true;
}
void ZWaveProxyFrame::encode(ProtoWriteBuffer buffer) const { buffer.encode_bytes(1, this->data, this->data_len); }
void ZWaveProxyFrame::calculate_size(ProtoSize &size) const { size.add_length(1, this->data_len); }
bool ZWaveProxyRequest::decode_varint(uint32_t field_id, ProtoVarInt value) {
switch (field_id) {
case 1:
this->type = static_cast<enums::ZWaveProxyRequestType>(value.as_uint32());
break;
default:
return false;
}
return true;
}
bool ZWaveProxyRequest::decode_length(uint32_t field_id, ProtoLengthDelimited value) {
switch (field_id) {
case 2: {
// Use raw data directly to avoid allocation
this->data = value.data();
this->data_len = value.size();
break;
}
default:
return false;
}
return true;
}
void ZWaveProxyRequest::encode(ProtoWriteBuffer buffer) const {
buffer.encode_uint32(1, static_cast<uint32_t>(this->type));
buffer.encode_bytes(2, this->data, this->data_len);
}
void ZWaveProxyRequest::calculate_size(ProtoSize &size) const {
size.add_uint32(1, static_cast<uint32_t>(this->type));
size.add_length(2, this->data_len);
}
#endif
} // namespace esphome::api

File diff suppressed because it is too large

View File

@@ -655,26 +655,10 @@ template<> const char *proto_enum_to_string<enums::UpdateCommand>(enums::UpdateC
}
}
#endif
#ifdef USE_ZWAVE_PROXY
template<> const char *proto_enum_to_string<enums::ZWaveProxyRequestType>(enums::ZWaveProxyRequestType value) {
switch (value) {
case enums::ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE:
return "ZWAVE_PROXY_REQUEST_TYPE_SUBSCRIBE";
case enums::ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE:
return "ZWAVE_PROXY_REQUEST_TYPE_UNSUBSCRIBE";
case enums::ZWAVE_PROXY_REQUEST_TYPE_HOME_ID_CHANGE:
return "ZWAVE_PROXY_REQUEST_TYPE_HOME_ID_CHANGE";
default:
return "UNKNOWN";
}
}
#endif
void HelloRequest::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "HelloRequest");
out.append(" client_info: ");
out.append(format_hex_pretty(this->client_info, this->client_info_len));
out.append("\n");
dump_field(out, "client_info", this->client_info);
dump_field(out, "api_version_major", this->api_version_major);
dump_field(out, "api_version_minor", this->api_version_minor);
}
@@ -685,18 +669,8 @@ void HelloResponse::dump_to(std::string &out) const {
dump_field(out, "server_info", this->server_info_ref_);
dump_field(out, "name", this->name_ref_);
}
#ifdef USE_API_PASSWORD
void AuthenticationRequest::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "AuthenticationRequest");
out.append(" password: ");
out.append(format_hex_pretty(this->password, this->password_len));
out.append("\n");
}
void AuthenticationResponse::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "AuthenticationResponse");
dump_field(out, "invalid_password", this->invalid_password);
}
#endif
void ConnectRequest::dump_to(std::string &out) const { dump_field(out, "password", this->password); }
void ConnectResponse::dump_to(std::string &out) const { dump_field(out, "invalid_password", this->invalid_password); }
void DisconnectRequest::dump_to(std::string &out) const { out.append("DisconnectRequest {}"); }
void DisconnectResponse::dump_to(std::string &out) const { out.append("DisconnectResponse {}"); }
void PingRequest::dump_to(std::string &out) const { out.append("PingRequest {}"); }
@@ -775,12 +749,6 @@ void DeviceInfoResponse::dump_to(std::string &out) const {
this->area.dump_to(out);
out.append("\n");
#endif
#ifdef USE_ZWAVE_PROXY
dump_field(out, "zwave_proxy_feature_flags", this->zwave_proxy_feature_flags);
#endif
#ifdef USE_ZWAVE_PROXY
dump_field(out, "zwave_home_id", this->zwave_home_id);
#endif
}
void ListEntitiesRequest::dump_to(std::string &out) const { out.append("ListEntitiesRequest {}"); }
void ListEntitiesDoneResponse::dump_to(std::string &out) const { out.append("ListEntitiesDoneResponse {}"); }
@@ -1103,8 +1071,8 @@ void HomeassistantServiceMap::dump_to(std::string &out) const {
dump_field(out, "key", this->key_ref_);
dump_field(out, "value", this->value);
}
void HomeassistantActionRequest::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "HomeassistantActionRequest");
void HomeassistantServiceResponse::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "HomeassistantServiceResponse");
dump_field(out, "service", this->service_ref_);
for (const auto &it : this->data) {
out.append(" data: ");
@@ -1122,28 +1090,6 @@ void HomeassistantActionRequest::dump_to(std::string &out) const {
out.append("\n");
}
dump_field(out, "is_event", this->is_event);
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
dump_field(out, "call_id", this->call_id);
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
dump_field(out, "wants_response", this->wants_response);
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
dump_field(out, "response_template", this->response_template);
#endif
}
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
void HomeassistantActionResponse::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "HomeassistantActionResponse");
dump_field(out, "call_id", this->call_id);
dump_field(out, "success", this->success);
dump_field(out, "error_message", this->error_message);
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
out.append(" response_data: ");
out.append(format_hex_pretty(this->response_data, this->response_data_len));
out.append("\n");
#endif
}
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
@@ -1164,13 +1110,7 @@ void HomeAssistantStateResponse::dump_to(std::string &out) const {
}
#endif
void GetTimeRequest::dump_to(std::string &out) const { out.append("GetTimeRequest {}"); }
void GetTimeResponse::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "GetTimeResponse");
dump_field(out, "epoch_seconds", this->epoch_seconds);
out.append(" timezone: ");
out.append(format_hex_pretty(this->timezone, this->timezone_len));
out.append("\n");
}
void GetTimeResponse::dump_to(std::string &out) const { dump_field(out, "epoch_seconds", this->epoch_seconds); }
#ifdef USE_API_SERVICES
void ListEntitiesServicesArgument::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "ListEntitiesServicesArgument");
@@ -1195,7 +1135,7 @@ void ExecuteServiceArgument::dump_to(std::string &out) const {
dump_field(out, "string_", this->string_);
dump_field(out, "int_", this->int_);
for (const auto it : this->bool_array) {
dump_field(out, "bool_array", static_cast<bool>(it), 4);
dump_field(out, "bool_array", it, 4);
}
for (const auto &it : this->int_array) {
dump_field(out, "int_array", it, 4);
@@ -1292,7 +1232,6 @@ void ListEntitiesClimateResponse::dump_to(std::string &out) const {
#ifdef USE_DEVICES
dump_field(out, "device_id", this->device_id);
#endif
dump_field(out, "feature_flags", this->feature_flags);
}
void ClimateStateResponse::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "ClimateStateResponse");
@@ -1683,7 +1622,7 @@ void BluetoothGATTWriteRequest::dump_to(std::string &out) const {
dump_field(out, "handle", this->handle);
dump_field(out, "response", this->response);
out.append(" data: ");
out.append(format_hex_pretty(this->data, this->data_len));
out.append(format_hex_pretty(reinterpret_cast<const uint8_t *>(this->data.data()), this->data.size()));
out.append("\n");
}
void BluetoothGATTReadDescriptorRequest::dump_to(std::string &out) const {
@@ -1696,7 +1635,7 @@ void BluetoothGATTWriteDescriptorRequest::dump_to(std::string &out) const {
dump_field(out, "address", this->address);
dump_field(out, "handle", this->handle);
out.append(" data: ");
out.append(format_hex_pretty(this->data, this->data_len));
out.append(format_hex_pretty(reinterpret_cast<const uint8_t *>(this->data.data()), this->data.size()));
out.append("\n");
}
void BluetoothGATTNotifyRequest::dump_to(std::string &out) const {
@@ -1765,7 +1704,6 @@ void BluetoothScannerStateResponse::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "BluetoothScannerStateResponse");
dump_field(out, "state", static_cast<enums::BluetoothScannerState>(this->state));
dump_field(out, "mode", static_cast<enums::BluetoothScannerMode>(this->mode));
dump_field(out, "configured_mode", static_cast<enums::BluetoothScannerMode>(this->configured_mode));
}
void BluetoothScannerSetModeRequest::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "BluetoothScannerSetModeRequest");
@@ -1849,25 +1787,8 @@ void VoiceAssistantWakeWord::dump_to(std::string &out) const {
dump_field(out, "trained_languages", it, 4);
}
}
void VoiceAssistantExternalWakeWord::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "VoiceAssistantExternalWakeWord");
dump_field(out, "id", this->id);
dump_field(out, "wake_word", this->wake_word);
for (const auto &it : this->trained_languages) {
dump_field(out, "trained_languages", it, 4);
}
dump_field(out, "model_type", this->model_type);
dump_field(out, "model_size", this->model_size);
dump_field(out, "model_hash", this->model_hash);
dump_field(out, "url", this->url);
}
void VoiceAssistantConfigurationRequest::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "VoiceAssistantConfigurationRequest");
for (const auto &it : this->external_wake_words) {
out.append(" external_wake_words: ");
it.dump_to(out);
out.append("\n");
}
out.append("VoiceAssistantConfigurationRequest {}");
}
void VoiceAssistantConfigurationResponse::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "VoiceAssistantConfigurationResponse");
@@ -2176,21 +2097,6 @@ void UpdateCommandRequest::dump_to(std::string &out) const {
#endif
}
#endif
#ifdef USE_ZWAVE_PROXY
void ZWaveProxyFrame::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "ZWaveProxyFrame");
out.append(" data: ");
out.append(format_hex_pretty(this->data, this->data_len));
out.append("\n");
}
void ZWaveProxyRequest::dump_to(std::string &out) const {
MessageDumpHelper helper(out, "ZWaveProxyRequest");
dump_field(out, "type", static_cast<enums::ZWaveProxyRequestType>(this->type));
out.append(" data: ");
out.append(format_hex_pretty(this->data, this->data_len));
out.append("\n");
}
#endif
} // namespace esphome::api

View File

@@ -24,17 +24,15 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
this->on_hello_request(msg);
break;
}
#ifdef USE_API_PASSWORD
case AuthenticationRequest::MESSAGE_TYPE: {
AuthenticationRequest msg;
case ConnectRequest::MESSAGE_TYPE: {
ConnectRequest msg;
msg.decode(msg_data, msg_size);
#ifdef HAS_PROTO_MESSAGE_DUMP
ESP_LOGVV(TAG, "on_authentication_request: %s", msg.dump().c_str());
ESP_LOGVV(TAG, "on_connect_request: %s", msg.dump().c_str());
#endif
this->on_authentication_request(msg);
this->on_connect_request(msg);
break;
}
#endif
case DisconnectRequest::MESSAGE_TYPE: {
DisconnectRequest msg;
// Empty message: no decode needed
@@ -162,6 +160,15 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
break;
}
#endif
case GetTimeRequest::MESSAGE_TYPE: {
GetTimeRequest msg;
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
ESP_LOGVV(TAG, "on_get_time_request: %s", msg.dump().c_str());
#endif
this->on_get_time_request(msg);
break;
}
case GetTimeResponse::MESSAGE_TYPE: {
GetTimeResponse msg;
msg.decode(msg_data, msg_size);
@@ -548,7 +555,7 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
#ifdef USE_VOICE_ASSISTANT
case VoiceAssistantConfigurationRequest::MESSAGE_TYPE: {
VoiceAssistantConfigurationRequest msg;
msg.decode(msg_data, msg_size);
// Empty message: no decode needed
#ifdef HAS_PROTO_MESSAGE_DUMP
ESP_LOGVV(TAG, "on_voice_assistant_configuration_request: %s", msg.dump().c_str());
#endif
@@ -588,39 +595,6 @@ void APIServerConnectionBase::read_message(uint32_t msg_size, uint32_t msg_type,
this->on_bluetooth_scanner_set_mode_request(msg);
break;
}
#endif
#ifdef USE_ZWAVE_PROXY
case ZWaveProxyFrame::MESSAGE_TYPE: {
ZWaveProxyFrame msg;
msg.decode(msg_data, msg_size);
#ifdef HAS_PROTO_MESSAGE_DUMP
ESP_LOGVV(TAG, "on_z_wave_proxy_frame: %s", msg.dump().c_str());
#endif
this->on_z_wave_proxy_frame(msg);
break;
}
#endif
#ifdef USE_ZWAVE_PROXY
case ZWaveProxyRequest::MESSAGE_TYPE: {
ZWaveProxyRequest msg;
msg.decode(msg_data, msg_size);
#ifdef HAS_PROTO_MESSAGE_DUMP
ESP_LOGVV(TAG, "on_z_wave_proxy_request: %s", msg.dump().c_str());
#endif
this->on_z_wave_proxy_request(msg);
break;
}
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
case HomeassistantActionResponse::MESSAGE_TYPE: {
HomeassistantActionResponse msg;
msg.decode(msg_data, msg_size);
#ifdef HAS_PROTO_MESSAGE_DUMP
ESP_LOGVV(TAG, "on_homeassistant_action_response: %s", msg.dump().c_str());
#endif
this->on_homeassistant_action_response(msg);
break;
}
#endif
default:
break;
@@ -632,13 +606,11 @@ void APIServerConnection::on_hello_request(const HelloRequest &msg) {
this->on_fatal_error();
}
}
#ifdef USE_API_PASSWORD
void APIServerConnection::on_authentication_request(const AuthenticationRequest &msg) {
if (!this->send_authenticate_response(msg)) {
void APIServerConnection::on_connect_request(const ConnectRequest &msg) {
if (!this->send_connect_response(msg)) {
this->on_fatal_error();
}
}
#endif
void APIServerConnection::on_disconnect_request(const DisconnectRequest &msg) {
if (!this->send_disconnect_response(msg)) {
this->on_fatal_error();
@@ -650,139 +622,246 @@ void APIServerConnection::on_ping_request(const PingRequest &msg) {
}
}
void APIServerConnection::on_device_info_request(const DeviceInfoRequest &msg) {
if (!this->send_device_info_response(msg)) {
if (this->check_connection_setup_() && !this->send_device_info_response(msg)) {
this->on_fatal_error();
}
}
void APIServerConnection::on_list_entities_request(const ListEntitiesRequest &msg) { this->list_entities(msg); }
void APIServerConnection::on_subscribe_states_request(const SubscribeStatesRequest &msg) {
this->subscribe_states(msg);
void APIServerConnection::on_list_entities_request(const ListEntitiesRequest &msg) {
if (this->check_authenticated_()) {
this->list_entities(msg);
}
}
void APIServerConnection::on_subscribe_states_request(const SubscribeStatesRequest &msg) {
if (this->check_authenticated_()) {
this->subscribe_states(msg);
}
}
void APIServerConnection::on_subscribe_logs_request(const SubscribeLogsRequest &msg) {
if (this->check_authenticated_()) {
this->subscribe_logs(msg);
}
}
void APIServerConnection::on_subscribe_logs_request(const SubscribeLogsRequest &msg) { this->subscribe_logs(msg); }
#ifdef USE_API_HOMEASSISTANT_SERVICES
void APIServerConnection::on_subscribe_homeassistant_services_request(
const SubscribeHomeassistantServicesRequest &msg) {
this->subscribe_homeassistant_services(msg);
if (this->check_authenticated_()) {
this->subscribe_homeassistant_services(msg);
}
}
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
void APIServerConnection::on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &msg) {
this->subscribe_home_assistant_states(msg);
if (this->check_authenticated_()) {
this->subscribe_home_assistant_states(msg);
}
}
#endif
void APIServerConnection::on_get_time_request(const GetTimeRequest &msg) {
if (this->check_connection_setup_() && !this->send_get_time_response(msg)) {
this->on_fatal_error();
}
}
#ifdef USE_API_SERVICES
void APIServerConnection::on_execute_service_request(const ExecuteServiceRequest &msg) { this->execute_service(msg); }
void APIServerConnection::on_execute_service_request(const ExecuteServiceRequest &msg) {
if (this->check_authenticated_()) {
this->execute_service(msg);
}
}
#endif
#ifdef USE_API_NOISE
void APIServerConnection::on_noise_encryption_set_key_request(const NoiseEncryptionSetKeyRequest &msg) {
if (!this->send_noise_encryption_set_key_response(msg)) {
if (this->check_authenticated_() && !this->send_noise_encryption_set_key_response(msg)) {
this->on_fatal_error();
}
}
#endif
#ifdef USE_BUTTON
void APIServerConnection::on_button_command_request(const ButtonCommandRequest &msg) { this->button_command(msg); }
void APIServerConnection::on_button_command_request(const ButtonCommandRequest &msg) {
if (this->check_authenticated_()) {
this->button_command(msg);
}
}
#endif
#ifdef USE_CAMERA
void APIServerConnection::on_camera_image_request(const CameraImageRequest &msg) { this->camera_image(msg); }
void APIServerConnection::on_camera_image_request(const CameraImageRequest &msg) {
if (this->check_authenticated_()) {
this->camera_image(msg);
}
}
#endif
#ifdef USE_CLIMATE
void APIServerConnection::on_climate_command_request(const ClimateCommandRequest &msg) { this->climate_command(msg); }
void APIServerConnection::on_climate_command_request(const ClimateCommandRequest &msg) {
if (this->check_authenticated_()) {
this->climate_command(msg);
}
}
#endif
#ifdef USE_COVER
void APIServerConnection::on_cover_command_request(const CoverCommandRequest &msg) { this->cover_command(msg); }
void APIServerConnection::on_cover_command_request(const CoverCommandRequest &msg) {
if (this->check_authenticated_()) {
this->cover_command(msg);
}
}
#endif
#ifdef USE_DATETIME_DATE
void APIServerConnection::on_date_command_request(const DateCommandRequest &msg) { this->date_command(msg); }
void APIServerConnection::on_date_command_request(const DateCommandRequest &msg) {
if (this->check_authenticated_()) {
this->date_command(msg);
}
}
#endif
#ifdef USE_DATETIME_DATETIME
void APIServerConnection::on_date_time_command_request(const DateTimeCommandRequest &msg) {
this->datetime_command(msg);
if (this->check_authenticated_()) {
this->datetime_command(msg);
}
}
#endif
#ifdef USE_FAN
void APIServerConnection::on_fan_command_request(const FanCommandRequest &msg) { this->fan_command(msg); }
void APIServerConnection::on_fan_command_request(const FanCommandRequest &msg) {
if (this->check_authenticated_()) {
this->fan_command(msg);
}
}
#endif
#ifdef USE_LIGHT
void APIServerConnection::on_light_command_request(const LightCommandRequest &msg) { this->light_command(msg); }
void APIServerConnection::on_light_command_request(const LightCommandRequest &msg) {
if (this->check_authenticated_()) {
this->light_command(msg);
}
}
#endif
#ifdef USE_LOCK
void APIServerConnection::on_lock_command_request(const LockCommandRequest &msg) { this->lock_command(msg); }
void APIServerConnection::on_lock_command_request(const LockCommandRequest &msg) {
if (this->check_authenticated_()) {
this->lock_command(msg);
}
}
#endif
#ifdef USE_MEDIA_PLAYER
void APIServerConnection::on_media_player_command_request(const MediaPlayerCommandRequest &msg) {
this->media_player_command(msg);
if (this->check_authenticated_()) {
this->media_player_command(msg);
}
}
#endif
#ifdef USE_NUMBER
void APIServerConnection::on_number_command_request(const NumberCommandRequest &msg) { this->number_command(msg); }
void APIServerConnection::on_number_command_request(const NumberCommandRequest &msg) {
if (this->check_authenticated_()) {
this->number_command(msg);
}
}
#endif
#ifdef USE_SELECT
void APIServerConnection::on_select_command_request(const SelectCommandRequest &msg) { this->select_command(msg); }
void APIServerConnection::on_select_command_request(const SelectCommandRequest &msg) {
if (this->check_authenticated_()) {
this->select_command(msg);
}
}
#endif
#ifdef USE_SIREN
void APIServerConnection::on_siren_command_request(const SirenCommandRequest &msg) { this->siren_command(msg); }
void APIServerConnection::on_siren_command_request(const SirenCommandRequest &msg) {
if (this->check_authenticated_()) {
this->siren_command(msg);
}
}
#endif
#ifdef USE_SWITCH
void APIServerConnection::on_switch_command_request(const SwitchCommandRequest &msg) { this->switch_command(msg); }
void APIServerConnection::on_switch_command_request(const SwitchCommandRequest &msg) {
if (this->check_authenticated_()) {
this->switch_command(msg);
}
}
#endif
#ifdef USE_TEXT
void APIServerConnection::on_text_command_request(const TextCommandRequest &msg) { this->text_command(msg); }
void APIServerConnection::on_text_command_request(const TextCommandRequest &msg) {
if (this->check_authenticated_()) {
this->text_command(msg);
}
}
#endif
#ifdef USE_DATETIME_TIME
void APIServerConnection::on_time_command_request(const TimeCommandRequest &msg) { this->time_command(msg); }
void APIServerConnection::on_time_command_request(const TimeCommandRequest &msg) {
if (this->check_authenticated_()) {
this->time_command(msg);
}
}
#endif
#ifdef USE_UPDATE
void APIServerConnection::on_update_command_request(const UpdateCommandRequest &msg) { this->update_command(msg); }
void APIServerConnection::on_update_command_request(const UpdateCommandRequest &msg) {
if (this->check_authenticated_()) {
this->update_command(msg);
}
}
#endif
#ifdef USE_VALVE
void APIServerConnection::on_valve_command_request(const ValveCommandRequest &msg) { this->valve_command(msg); }
void APIServerConnection::on_valve_command_request(const ValveCommandRequest &msg) {
if (this->check_authenticated_()) {
this->valve_command(msg);
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_subscribe_bluetooth_le_advertisements_request(
const SubscribeBluetoothLEAdvertisementsRequest &msg) {
this->subscribe_bluetooth_le_advertisements(msg);
if (this->check_authenticated_()) {
this->subscribe_bluetooth_le_advertisements(msg);
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_bluetooth_device_request(const BluetoothDeviceRequest &msg) {
this->bluetooth_device_request(msg);
if (this->check_authenticated_()) {
this->bluetooth_device_request(msg);
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_bluetooth_gatt_get_services_request(const BluetoothGATTGetServicesRequest &msg) {
this->bluetooth_gatt_get_services(msg);
if (this->check_authenticated_()) {
this->bluetooth_gatt_get_services(msg);
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_bluetooth_gatt_read_request(const BluetoothGATTReadRequest &msg) {
this->bluetooth_gatt_read(msg);
if (this->check_authenticated_()) {
this->bluetooth_gatt_read(msg);
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_bluetooth_gatt_write_request(const BluetoothGATTWriteRequest &msg) {
this->bluetooth_gatt_write(msg);
if (this->check_authenticated_()) {
this->bluetooth_gatt_write(msg);
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_bluetooth_gatt_read_descriptor_request(const BluetoothGATTReadDescriptorRequest &msg) {
this->bluetooth_gatt_read_descriptor(msg);
if (this->check_authenticated_()) {
this->bluetooth_gatt_read_descriptor(msg);
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_bluetooth_gatt_write_descriptor_request(const BluetoothGATTWriteDescriptorRequest &msg) {
this->bluetooth_gatt_write_descriptor(msg);
if (this->check_authenticated_()) {
this->bluetooth_gatt_write_descriptor(msg);
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_bluetooth_gatt_notify_request(const BluetoothGATTNotifyRequest &msg) {
this->bluetooth_gatt_notify(msg);
if (this->check_authenticated_()) {
this->bluetooth_gatt_notify(msg);
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_subscribe_bluetooth_connections_free_request(
const SubscribeBluetoothConnectionsFreeRequest &msg) {
if (!this->send_subscribe_bluetooth_connections_free_response(msg)) {
if (this->check_authenticated_() && !this->send_subscribe_bluetooth_connections_free_response(msg)) {
this->on_fatal_error();
}
}
@@ -790,68 +869,45 @@ void APIServerConnection::on_subscribe_bluetooth_connections_free_request(
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_unsubscribe_bluetooth_le_advertisements_request(
const UnsubscribeBluetoothLEAdvertisementsRequest &msg) {
this->unsubscribe_bluetooth_le_advertisements(msg);
if (this->check_authenticated_()) {
this->unsubscribe_bluetooth_le_advertisements(msg);
}
}
#endif
#ifdef USE_BLUETOOTH_PROXY
void APIServerConnection::on_bluetooth_scanner_set_mode_request(const BluetoothScannerSetModeRequest &msg) {
this->bluetooth_scanner_set_mode(msg);
if (this->check_authenticated_()) {
this->bluetooth_scanner_set_mode(msg);
}
}
#endif
#ifdef USE_VOICE_ASSISTANT
void APIServerConnection::on_subscribe_voice_assistant_request(const SubscribeVoiceAssistantRequest &msg) {
this->subscribe_voice_assistant(msg);
if (this->check_authenticated_()) {
this->subscribe_voice_assistant(msg);
}
}
#endif
#ifdef USE_VOICE_ASSISTANT
void APIServerConnection::on_voice_assistant_configuration_request(const VoiceAssistantConfigurationRequest &msg) {
if (!this->send_voice_assistant_get_configuration_response(msg)) {
if (this->check_authenticated_() && !this->send_voice_assistant_get_configuration_response(msg)) {
this->on_fatal_error();
}
}
#endif
#ifdef USE_VOICE_ASSISTANT
void APIServerConnection::on_voice_assistant_set_configuration(const VoiceAssistantSetConfiguration &msg) {
this->voice_assistant_set_configuration(msg);
if (this->check_authenticated_()) {
this->voice_assistant_set_configuration(msg);
}
}
#endif
#ifdef USE_ALARM_CONTROL_PANEL
void APIServerConnection::on_alarm_control_panel_command_request(const AlarmControlPanelCommandRequest &msg) {
this->alarm_control_panel_command(msg);
}
#endif
#ifdef USE_ZWAVE_PROXY
void APIServerConnection::on_z_wave_proxy_frame(const ZWaveProxyFrame &msg) { this->zwave_proxy_frame(msg); }
#endif
#ifdef USE_ZWAVE_PROXY
void APIServerConnection::on_z_wave_proxy_request(const ZWaveProxyRequest &msg) { this->zwave_proxy_request(msg); }
#endif
void APIServerConnection::read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) {
// Check authentication/connection requirements for messages
switch (msg_type) {
case HelloRequest::MESSAGE_TYPE: // No setup required
#ifdef USE_API_PASSWORD
case AuthenticationRequest::MESSAGE_TYPE: // No setup required
#endif
case DisconnectRequest::MESSAGE_TYPE: // No setup required
case PingRequest::MESSAGE_TYPE: // No setup required
break; // Skip all checks for these messages
case DeviceInfoRequest::MESSAGE_TYPE: // Connection setup only
if (!this->check_connection_setup_()) {
return; // Connection not setup
}
break;
default:
// All other messages require authentication (which includes connection check)
if (!this->check_authenticated_()) {
return; // Authentication failed
}
break;
if (this->check_authenticated_()) {
this->alarm_control_panel_command(msg);
}
// Call base implementation to process the message
APIServerConnectionBase::read_message(msg_size, msg_type, msg_data);
}
#endif
} // namespace esphome::api

View File

@@ -26,9 +26,7 @@ class APIServerConnectionBase : public ProtoService {
virtual void on_hello_request(const HelloRequest &value){};
#ifdef USE_API_PASSWORD
virtual void on_authentication_request(const AuthenticationRequest &value){};
#endif
virtual void on_connect_request(const ConnectRequest &value){};
virtual void on_disconnect_request(const DisconnectRequest &value){};
virtual void on_disconnect_response(const DisconnectResponse &value){};
@@ -66,9 +64,6 @@ class APIServerConnectionBase : public ProtoService {
virtual void on_subscribe_homeassistant_services_request(const SubscribeHomeassistantServicesRequest &value){};
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
virtual void on_homeassistant_action_response(const HomeassistantActionResponse &value){};
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
virtual void on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &value){};
#endif
@@ -76,7 +71,7 @@ class APIServerConnectionBase : public ProtoService {
#ifdef USE_API_HOMEASSISTANT_STATES
virtual void on_home_assistant_state_response(const HomeAssistantStateResponse &value){};
#endif
virtual void on_get_time_request(const GetTimeRequest &value){};
virtual void on_get_time_response(const GetTimeResponse &value){};
#ifdef USE_API_SERVICES
@@ -210,12 +205,6 @@ class APIServerConnectionBase : public ProtoService {
#ifdef USE_UPDATE
virtual void on_update_command_request(const UpdateCommandRequest &value){};
#endif
#ifdef USE_ZWAVE_PROXY
virtual void on_z_wave_proxy_frame(const ZWaveProxyFrame &value){};
#endif
#ifdef USE_ZWAVE_PROXY
virtual void on_z_wave_proxy_request(const ZWaveProxyRequest &value){};
#endif
protected:
void read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) override;
@@ -224,9 +213,7 @@ class APIServerConnectionBase : public ProtoService {
class APIServerConnection : public APIServerConnectionBase {
public:
virtual bool send_hello_response(const HelloRequest &msg) = 0;
#ifdef USE_API_PASSWORD
virtual bool send_authenticate_response(const AuthenticationRequest &msg) = 0;
#endif
virtual bool send_connect_response(const ConnectRequest &msg) = 0;
virtual bool send_disconnect_response(const DisconnectRequest &msg) = 0;
virtual bool send_ping_response(const PingRequest &msg) = 0;
virtual bool send_device_info_response(const DeviceInfoRequest &msg) = 0;
@@ -239,6 +226,7 @@ class APIServerConnection : public APIServerConnectionBase {
#ifdef USE_API_HOMEASSISTANT_STATES
virtual void subscribe_home_assistant_states(const SubscribeHomeAssistantStatesRequest &msg) = 0;
#endif
virtual bool send_get_time_response(const GetTimeRequest &msg) = 0;
#ifdef USE_API_SERVICES
virtual void execute_service(const ExecuteServiceRequest &msg) = 0;
#endif
@@ -344,18 +332,10 @@ class APIServerConnection : public APIServerConnectionBase {
#endif
#ifdef USE_ALARM_CONTROL_PANEL
virtual void alarm_control_panel_command(const AlarmControlPanelCommandRequest &msg) = 0;
#endif
#ifdef USE_ZWAVE_PROXY
virtual void zwave_proxy_frame(const ZWaveProxyFrame &msg) = 0;
#endif
#ifdef USE_ZWAVE_PROXY
virtual void zwave_proxy_request(const ZWaveProxyRequest &msg) = 0;
#endif
protected:
void on_hello_request(const HelloRequest &msg) override;
#ifdef USE_API_PASSWORD
void on_authentication_request(const AuthenticationRequest &msg) override;
#endif
void on_connect_request(const ConnectRequest &msg) override;
void on_disconnect_request(const DisconnectRequest &msg) override;
void on_ping_request(const PingRequest &msg) override;
void on_device_info_request(const DeviceInfoRequest &msg) override;
@@ -368,6 +348,7 @@ class APIServerConnection : public APIServerConnectionBase {
#ifdef USE_API_HOMEASSISTANT_STATES
void on_subscribe_home_assistant_states_request(const SubscribeHomeAssistantStatesRequest &msg) override;
#endif
void on_get_time_request(const GetTimeRequest &msg) override;
#ifdef USE_API_SERVICES
void on_execute_service_request(const ExecuteServiceRequest &msg) override;
#endif
@@ -474,13 +455,6 @@ class APIServerConnection : public APIServerConnectionBase {
#ifdef USE_ALARM_CONTROL_PANEL
void on_alarm_control_panel_command_request(const AlarmControlPanelCommandRequest &msg) override;
#endif
#ifdef USE_ZWAVE_PROXY
void on_z_wave_proxy_frame(const ZWaveProxyFrame &msg) override;
#endif
#ifdef USE_ZWAVE_PROXY
void on_z_wave_proxy_request(const ZWaveProxyRequest &msg) override;
#endif
void read_message(uint32_t msg_size, uint32_t msg_type, uint8_t *msg_data) override;
};
} // namespace esphome::api

View File

@@ -9,16 +9,12 @@
#include "esphome/core/log.h"
#include "esphome/core/util.h"
#include "esphome/core/version.h"
#ifdef USE_API_HOMEASSISTANT_SERVICES
#include "homeassistant_service.h"
#endif
#ifdef USE_LOGGER
#include "esphome/components/logger/logger.h"
#endif
#include <algorithm>
#include <utility>
namespace esphome::api {
@@ -41,14 +37,12 @@ void APIServer::setup() {
this->noise_pref_ = global_preferences->make_preference<SavedNoisePsk>(hash, true);
#ifndef USE_API_NOISE_PSK_FROM_YAML
// Only load saved PSK if not set from YAML
SavedNoisePsk noise_pref_saved{};
if (this->noise_pref_.load(&noise_pref_saved)) {
ESP_LOGD(TAG, "Loaded saved Noise PSK");
this->set_noise_psk(noise_pref_saved.psk);
}
#endif
#endif
// Schedule reboot if no clients connect within timeout
@@ -91,7 +85,7 @@ void APIServer::setup() {
return;
}
err = this->socket_->listen(this->listen_backlog_);
err = this->socket_->listen(4);
if (err != 0) {
ESP_LOGW(TAG, "Socket unable to listen: errno %d", errno);
this->mark_failed();
@@ -144,19 +138,9 @@ void APIServer::loop() {
while (true) {
struct sockaddr_storage source_addr;
socklen_t addr_len = sizeof(source_addr);
auto sock = this->socket_->accept_loop_monitored((struct sockaddr *) &source_addr, &addr_len);
if (!sock)
break;
// Check if we're at the connection limit
if (this->clients_.size() >= this->max_connections_) {
ESP_LOGW(TAG, "Max connections (%d), rejecting %s", this->max_connections_, sock->getpeername().c_str());
// Immediately close - socket destructor will handle cleanup
sock.reset();
continue;
}
ESP_LOGD(TAG, "Accept %s", sock->getpeername().c_str());
auto *conn = new APIConnection(std::move(sock), this);
@@ -181,8 +165,7 @@ void APIServer::loop() {
// Network is down - disconnect all clients
for (auto &client : this->clients_) {
client->on_fatal_error();
ESP_LOGW(TAG, "%s (%s): Network down; disconnect", client->client_info_.name.c_str(),
client->client_info_.peername.c_str());
ESP_LOGW(TAG, "%s: Network down; disconnect", client->get_client_combined_info().c_str());
}
// Continue to process and clean up the clients below
}
@@ -221,10 +204,8 @@ void APIServer::loop() {
void APIServer::dump_config() {
ESP_LOGCONFIG(TAG,
"Server:\n"
" Address: %s:%u\n"
" Listen backlog: %u\n"
" Max connections: %u",
network::get_use_address().c_str(), this->port_, this->listen_backlog_, this->max_connections_);
" Address: %s:%u",
network::get_use_address().c_str(), this->port_);
#ifdef USE_API_NOISE
ESP_LOGCONFIG(TAG, " Noise encryption: %s", YESNO(this->noise_ctx_->has_psk()));
if (!this->noise_ctx_->has_psk()) {
@@ -236,12 +217,12 @@ void APIServer::dump_config() {
}
#ifdef USE_API_PASSWORD
bool APIServer::check_password(const uint8_t *password_data, size_t password_len) const {
bool APIServer::check_password(const std::string &password) const {
// depend only on input password length
const char *a = this->password_.c_str();
uint32_t len_a = this->password_.length();
const char *b = reinterpret_cast<const char *>(password_data);
uint32_t len_b = password_len;
const char *b = password.c_str();
uint32_t len_b = password.length();
// disable optimization with volatile
volatile uint32_t length = len_b;
@@ -264,7 +245,6 @@ bool APIServer::check_password(const uint8_t *password_data, size_t password_len
return result == 0;
}
#endif
void APIServer::handle_disconnect(APIConnection *conn) {}
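The `check_password()` hunk above keeps its constant-time comparison while changing the argument type. As a hedged, standalone sketch of that technique (the names below are illustrative, not ESPHome APIs), a comparison whose run time depends only on the length of the caller-supplied input looks like this:

```cpp
#include <cstddef>
#include <cstdint>

// Hypothetical helper, not the ESPHome implementation: the loop always walks every
// byte of the supplied input, so timing reveals only the input length, never the
// stored password length or how many leading bytes matched.
bool constant_time_password_equals(const char *stored, size_t stored_len, const char *input, size_t input_len) {
  volatile uint8_t diff = (stored_len == input_len) ? 0 : 1;
  for (size_t i = 0; i < input_len; i++) {
    // Index the stored password modulo its length so every input byte is compared against something
    uint8_t a = (stored_len != 0) ? static_cast<uint8_t>(stored[i % stored_len]) : 0;
    diff = diff | (a ^ static_cast<uint8_t>(input[i]));
  }
  return diff == 0;
}
```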
@@ -375,15 +355,6 @@ void APIServer::on_update(update::UpdateEntity *obj) {
}
#endif
#ifdef USE_ZWAVE_PROXY
void APIServer::on_zwave_proxy_request(const esphome::api::ProtoMessage &msg) {
// We could add code to manage a second subscription type, but, since this message type is
// very infrequent and small, we simply send it to all clients
for (auto &c : this->clients_)
c->send_message(msg, api::ZWaveProxyRequest::MESSAGE_TYPE);
}
#endif
#ifdef USE_ALARM_CONTROL_PANEL
API_DISPATCH_UPDATE(alarm_control_panel::AlarmControlPanel, alarm_control_panel)
#endif
@@ -399,43 +370,12 @@ void APIServer::set_password(const std::string &password) { this->password_ = pa
void APIServer::set_batch_delay(uint16_t batch_delay) { this->batch_delay_ = batch_delay; }
#ifdef USE_API_HOMEASSISTANT_SERVICES
void APIServer::send_homeassistant_action(const HomeassistantActionRequest &call) {
void APIServer::send_homeassistant_service_call(const HomeassistantServiceResponse &call) {
for (auto &client : this->clients_) {
client->send_homeassistant_action(call);
client->send_homeassistant_service_call(call);
}
}
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
void APIServer::register_action_response_callback(uint32_t call_id, ActionResponseCallback callback) {
this->action_response_callbacks_.push_back({call_id, std::move(callback)});
}
void APIServer::handle_action_response(uint32_t call_id, bool success, const std::string &error_message) {
for (auto it = this->action_response_callbacks_.begin(); it != this->action_response_callbacks_.end(); ++it) {
if (it->call_id == call_id) {
auto callback = std::move(it->callback);
this->action_response_callbacks_.erase(it);
ActionResponse response(success, error_message);
callback(response);
return;
}
}
}
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
void APIServer::handle_action_response(uint32_t call_id, bool success, const std::string &error_message,
const uint8_t *response_data, size_t response_data_len) {
for (auto it = this->action_response_callbacks_.begin(); it != this->action_response_callbacks_.end(); ++it) {
if (it->call_id == call_id) {
auto callback = std::move(it->callback);
this->action_response_callbacks_.erase(it);
ActionResponse response(success, error_message, response_data, response_data_len);
callback(response);
return;
}
}
}
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
#endif // USE_API_HOMEASSISTANT_SERVICES
#endif
#ifdef USE_API_HOMEASSISTANT_STATES
void APIServer::subscribe_home_assistant_state(std::string entity_id, optional<std::string> attribute,
@@ -469,12 +409,6 @@ void APIServer::set_reboot_timeout(uint32_t reboot_timeout) { this->reboot_timeo
#ifdef USE_API_NOISE
bool APIServer::save_noise_psk(psk_t psk, bool make_active) {
#ifdef USE_API_NOISE_PSK_FROM_YAML
// When PSK is set from YAML, this function should never be called
// but if it is, reject the change
ESP_LOGW(TAG, "Key set in YAML");
return false;
#else
auto &old_psk = this->noise_ctx_->get_psk();
if (std::equal(old_psk.begin(), old_psk.end(), psk.begin())) {
ESP_LOGW(TAG, "New PSK matches old");
@@ -503,7 +437,6 @@ bool APIServer::save_noise_psk(psk_t psk, bool make_active) {
});
}
return true;
#endif
}
#endif

View File

@@ -16,7 +16,6 @@
#include "user_services.h"
#endif
#include <map>
#include <vector>
namespace esphome::api {
@@ -38,15 +37,13 @@ class APIServer : public Component, public Controller {
void on_shutdown() override;
bool teardown() override;
#ifdef USE_API_PASSWORD
bool check_password(const uint8_t *password_data, size_t password_len) const;
bool check_password(const std::string &password) const;
void set_password(const std::string &password);
#endif
void set_port(uint16_t port);
void set_reboot_timeout(uint32_t reboot_timeout);
void set_batch_delay(uint16_t batch_delay);
uint16_t get_batch_delay() const { return batch_delay_; }
void set_listen_backlog(uint8_t listen_backlog) { this->listen_backlog_ = listen_backlog; }
void set_max_connections(uint8_t max_connections) { this->max_connections_ = max_connections; }
// Get reference to shared buffer for API connections
std::vector<uint8_t> &get_shared_buffer_ref() { return shared_write_buffer_; }
@@ -110,19 +107,8 @@ class APIServer : public Component, public Controller {
void on_media_player_update(media_player::MediaPlayer *obj) override;
#endif
#ifdef USE_API_HOMEASSISTANT_SERVICES
void send_homeassistant_action(const HomeassistantActionRequest &call);
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
// Action response handling
using ActionResponseCallback = std::function<void(const class ActionResponse &)>;
void register_action_response_callback(uint32_t call_id, ActionResponseCallback callback);
void handle_action_response(uint32_t call_id, bool success, const std::string &error_message);
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
void handle_action_response(uint32_t call_id, bool success, const std::string &error_message,
const uint8_t *response_data, size_t response_data_len);
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
#endif // USE_API_HOMEASSISTANT_SERVICES
void send_homeassistant_service_call(const HomeassistantServiceResponse &call);
#endif
#ifdef USE_API_SERVICES
void register_user_service(UserServiceDescriptor *descriptor) { this->user_services_.push_back(descriptor); }
#endif
@@ -139,9 +125,6 @@ class APIServer : public Component, public Controller {
#ifdef USE_UPDATE
void on_update(update::UpdateEntity *obj) override;
#endif
#ifdef USE_ZWAVE_PROXY
void on_zwave_proxy_request(const esphome::api::ProtoMessage &msg);
#endif
bool is_connected() const;
@@ -198,23 +181,12 @@ class APIServer : public Component, public Controller {
#ifdef USE_API_SERVICES
std::vector<UserServiceDescriptor *> user_services_;
#endif
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
struct PendingActionResponse {
uint32_t call_id;
ActionResponseCallback callback;
};
std::vector<PendingActionResponse> action_response_callbacks_;
#endif
// Group smaller types together
uint16_t port_{6053};
uint16_t batch_delay_{100};
// Connection limits - these defaults will be overridden by config values
// from cv.SplitDefault in __init__.py which sets platform-specific defaults
uint8_t listen_backlog_{4};
uint8_t max_connections_{8};
bool shutting_down_ = false;
// 7 bytes used, 1 byte padding
// 5 bytes used, 3 bytes padding
#ifdef USE_API_NOISE
std::shared_ptr<APINoiseContext> noise_ctx_ = std::make_shared<APINoiseContext>();

View File

@@ -62,11 +62,9 @@ async def async_run_logs(config: dict[str, Any], addresses: list[str]) -> None:
time_ = datetime.now()
message: bytes = msg.message
text = message.decode("utf8", "backslashreplace")
nanoseconds = time_.microsecond // 1000
timestamp = (
f"[{time_.hour:02}:{time_.minute:02}:{time_.second:02}.{nanoseconds:03}]"
)
for parsed_msg in parse_log_message(text, timestamp):
for parsed_msg in parse_log_message(
text, f"[{time_.hour:02}:{time_.minute:02}:{time_.second:02}]"
):
print(parsed_msg.replace("\033", "\\033") if dashboard else parsed_msg)
stop = await async_run(cli, on_log, name=name)

View File

@@ -179,9 +179,9 @@ class CustomAPIDevice {
* @param service_name The service to call.
*/
void call_homeassistant_service(const std::string &service_name) {
HomeassistantActionRequest resp;
HomeassistantServiceResponse resp;
resp.set_service(StringRef(service_name));
global_api_server->send_homeassistant_action(resp);
global_api_server->send_homeassistant_service_call(resp);
}
/** Call a Home Assistant service from ESPHome.
@@ -199,15 +199,15 @@ class CustomAPIDevice {
* @param data The data for the service call, mapping from string to string.
*/
void call_homeassistant_service(const std::string &service_name, const std::map<std::string, std::string> &data) {
HomeassistantActionRequest resp;
HomeassistantServiceResponse resp;
resp.set_service(StringRef(service_name));
resp.data.init(data.size());
for (auto &it : data) {
auto &kv = resp.data.emplace_back();
resp.data.emplace_back();
auto &kv = resp.data.back();
kv.set_key(StringRef(it.first));
kv.value = it.second;
}
global_api_server->send_homeassistant_action(resp);
global_api_server->send_homeassistant_service_call(resp);
}
/** Fire an ESPHome event in Home Assistant.
@@ -221,10 +221,10 @@ class CustomAPIDevice {
* @param event_name The event to fire.
*/
void fire_homeassistant_event(const std::string &event_name) {
HomeassistantActionRequest resp;
HomeassistantServiceResponse resp;
resp.set_service(StringRef(event_name));
resp.is_event = true;
global_api_server->send_homeassistant_action(resp);
global_api_server->send_homeassistant_service_call(resp);
}
/** Fire an ESPHome event in Home Assistant.
@@ -241,16 +241,16 @@ class CustomAPIDevice {
* @param data The data for the event, mapping from string to string.
*/
void fire_homeassistant_event(const std::string &service_name, const std::map<std::string, std::string> &data) {
HomeassistantActionRequest resp;
HomeassistantServiceResponse resp;
resp.set_service(StringRef(service_name));
resp.is_event = true;
resp.data.init(data.size());
for (auto &it : data) {
auto &kv = resp.data.emplace_back();
resp.data.emplace_back();
auto &kv = resp.data.back();
kv.set_key(StringRef(it.first));
kv.value = it.second;
}
global_api_server->send_homeassistant_action(resp);
global_api_server->send_homeassistant_service_call(resp);
}
#else
template<typename T = void> void call_homeassistant_service(const std::string &service_name) {

View File

@@ -3,15 +3,10 @@
#include "api_server.h"
#ifdef USE_API
#ifdef USE_API_HOMEASSISTANT_SERVICES
#include <functional>
#include <utility>
#include <vector>
#include "api_pb2.h"
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
#include "esphome/components/json/json_util.h"
#endif
#include "esphome/core/automation.h"
#include "esphome/core/helpers.h"
#include <vector>
namespace esphome::api {
@@ -41,191 +36,66 @@ template<typename... X> class TemplatableStringValue : public TemplatableValue<s
template<typename... Ts> class TemplatableKeyValuePair {
public:
// Default constructor needed for FixedVector::emplace_back()
TemplatableKeyValuePair() = default;
// Keys are always string literals from YAML dictionary keys (e.g., "code", "event")
// and never templatable values or lambdas. Only the value parameter can be a lambda/template.
// Using pass-by-value with std::move allows optimal performance for both lvalues and rvalues.
template<typename T> TemplatableKeyValuePair(std::string key, T value) : key(std::move(key)), value(value) {}
std::string key;
TemplatableStringValue<Ts...> value;
};
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
// Represents the response data from a Home Assistant action
class ActionResponse {
public:
ActionResponse(bool success, std::string error_message = "")
: success_(success), error_message_(std::move(error_message)) {}
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
ActionResponse(bool success, std::string error_message, const uint8_t *data, size_t data_len)
: success_(success), error_message_(std::move(error_message)) {
if (data == nullptr || data_len == 0)
return;
this->json_document_ = json::parse_json(data, data_len);
}
#endif
bool is_success() const { return this->success_; }
const std::string &get_error_message() const { return this->error_message_; }
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
// Get data as parsed JSON object (const version returns read-only view)
JsonObjectConst get_json() const { return this->json_document_.as<JsonObjectConst>(); }
#endif
protected:
bool success_;
std::string error_message_;
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
JsonDocument json_document_;
#endif
};
// Callback type for action responses
template<typename... Ts> using ActionResponseCallback = std::function<void(const ActionResponse &, Ts...)>;
#endif
template<typename... Ts> class HomeAssistantServiceCallAction : public Action<Ts...> {
public:
explicit HomeAssistantServiceCallAction(APIServer *parent, bool is_event) : parent_(parent) {
this->flags_.is_event = is_event;
}
explicit HomeAssistantServiceCallAction(APIServer *parent, bool is_event) : parent_(parent), is_event_(is_event) {}
template<typename T> void set_service(T service) { this->service_ = service; }
// Initialize FixedVector members - called from Python codegen with compile-time known sizes.
// Must be called before any add_* methods; capacity must match the number of subsequent add_* calls.
void init_data(size_t count) { this->data_.init(count); }
void init_data_template(size_t count) { this->data_template_.init(count); }
void init_variables(size_t count) { this->variables_.init(count); }
// Keys are always string literals from the Python code generation (e.g., cg.add(var.add_data("tag_id", templ))).
// The value parameter can be a lambda/template, but keys are never templatable.
template<typename K, typename V> void add_data(K &&key, V &&value) {
this->add_kv_(this->data_, std::forward<K>(key), std::forward<V>(value));
// Using pass-by-value allows the compiler to optimize for both lvalues and rvalues.
template<typename T> void add_data(std::string key, T value) { this->data_.emplace_back(std::move(key), value); }
template<typename T> void add_data_template(std::string key, T value) {
this->data_template_.emplace_back(std::move(key), value);
}
template<typename K, typename V> void add_data_template(K &&key, V &&value) {
this->add_kv_(this->data_template_, std::forward<K>(key), std::forward<V>(value));
template<typename T> void add_variable(std::string key, T value) {
this->variables_.emplace_back(std::move(key), value);
}
template<typename K, typename V> void add_variable(K &&key, V &&value) {
this->add_kv_(this->variables_, std::forward<K>(key), std::forward<V>(value));
}
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
template<typename T> void set_response_template(T response_template) {
this->response_template_ = response_template;
this->flags_.has_response_template = true;
}
void set_wants_status() { this->flags_.wants_status = true; }
void set_wants_response() { this->flags_.wants_response = true; }
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
Trigger<JsonObjectConst, Ts...> *get_success_trigger_with_response() const {
return this->success_trigger_with_response_;
}
#endif
Trigger<Ts...> *get_success_trigger() const { return this->success_trigger_; }
Trigger<std::string, Ts...> *get_error_trigger() const { return this->error_trigger_; }
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
void play(Ts... x) override {
HomeassistantActionRequest resp;
HomeassistantServiceResponse resp;
std::string service_value = this->service_.value(x...);
resp.set_service(StringRef(service_value));
resp.is_event = this->flags_.is_event;
this->populate_service_map(resp.data, this->data_, x...);
this->populate_service_map(resp.data_template, this->data_template_, x...);
this->populate_service_map(resp.variables, this->variables_, x...);
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
if (this->flags_.wants_status) {
// Generate a unique call ID for this service call
static uint32_t call_id_counter = 1;
uint32_t call_id = call_id_counter++;
resp.call_id = call_id;
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
if (this->flags_.wants_response) {
resp.wants_response = true;
// Set response template if provided
if (this->flags_.has_response_template) {
std::string response_template_value = this->response_template_.value(x...);
resp.response_template = response_template_value;
}
}
#endif
auto captured_args = std::make_tuple(x...);
this->parent_->register_action_response_callback(call_id, [this, captured_args](const ActionResponse &response) {
std::apply(
[this, &response](auto &&...args) {
if (response.is_success()) {
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
if (this->flags_.wants_response) {
this->success_trigger_with_response_->trigger(response.get_json(), args...);
} else
#endif
{
this->success_trigger_->trigger(args...);
}
} else {
this->error_trigger_->trigger(response.get_error_message(), args...);
}
},
captured_args);
});
}
#endif
this->parent_->send_homeassistant_action(resp);
}
protected:
// Helper to add key-value pairs to FixedVectors with perfect forwarding to avoid copies
template<typename K, typename V> void add_kv_(FixedVector<TemplatableKeyValuePair<Ts...>> &vec, K &&key, V &&value) {
auto &kv = vec.emplace_back();
kv.key = std::forward<K>(key);
kv.value = std::forward<V>(value);
}
template<typename VectorType, typename SourceType>
static void populate_service_map(VectorType &dest, SourceType &source, Ts... x) {
dest.init(source.size());
for (auto &it : source) {
auto &kv = dest.emplace_back();
resp.is_event = this->is_event_;
for (auto &it : this->data_) {
resp.data.emplace_back();
auto &kv = resp.data.back();
kv.set_key(StringRef(it.key));
kv.value = it.value.value(x...);
}
for (auto &it : this->data_template_) {
resp.data_template.emplace_back();
auto &kv = resp.data_template.back();
kv.set_key(StringRef(it.key));
kv.value = it.value.value(x...);
}
for (auto &it : this->variables_) {
resp.variables.emplace_back();
auto &kv = resp.variables.back();
kv.set_key(StringRef(it.key));
kv.value = it.value.value(x...);
}
this->parent_->send_homeassistant_service_call(resp);
}
protected:
APIServer *parent_;
bool is_event_;
TemplatableStringValue<Ts...> service_{};
FixedVector<TemplatableKeyValuePair<Ts...>> data_;
FixedVector<TemplatableKeyValuePair<Ts...>> data_template_;
FixedVector<TemplatableKeyValuePair<Ts...>> variables_;
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES
#ifdef USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
TemplatableStringValue<Ts...> response_template_{""};
Trigger<JsonObjectConst, Ts...> *success_trigger_with_response_ = new Trigger<JsonObjectConst, Ts...>();
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES_JSON
Trigger<Ts...> *success_trigger_ = new Trigger<Ts...>();
Trigger<std::string, Ts...> *error_trigger_ = new Trigger<std::string, Ts...>();
#endif // USE_API_HOMEASSISTANT_ACTION_RESPONSES
struct Flags {
uint8_t is_event : 1;
uint8_t wants_status : 1;
uint8_t wants_response : 1;
uint8_t has_response_template : 1;
uint8_t reserved : 5;
} flags_{0};
std::vector<TemplatableKeyValuePair<Ts...>> data_;
std::vector<TemplatableKeyValuePair<Ts...>> data_template_;
std::vector<TemplatableKeyValuePair<Ts...>> variables_;
};
} // namespace esphome::api
#endif
#endif
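The diff above replaces a packed `Flags` bit-field with a plain `bool is_event_`. For reference, a minimal sketch of the bit-field packing pattern that was used (member names copied for illustration only, not an exact reproduction):

```cpp
#include <cstdint>

// Several independent boolean options share a single byte instead of one bool each.
struct ActionFlags {
  uint8_t is_event : 1;
  uint8_t wants_status : 1;
  uint8_t wants_response : 1;
  uint8_t has_response_template : 1;
  uint8_t reserved : 4;  // unused padding bits
};
static_assert(sizeof(ActionFlags) == 1, "all four flags plus padding fit in one byte");
```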

View File

@@ -7,134 +7,75 @@ namespace esphome::api {
static const char *const TAG = "api.proto";
uint32_t ProtoDecodableMessage::count_repeated_field(const uint8_t *buffer, size_t length, uint32_t target_field_id) {
uint32_t count = 0;
const uint8_t *ptr = buffer;
const uint8_t *end = buffer + length;
while (ptr < end) {
uint32_t consumed;
// Parse field header (tag)
auto res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
if (!res.has_value()) {
break; // Invalid data, stop counting
}
uint32_t tag = res->as_uint32();
uint32_t field_type = tag & WIRE_TYPE_MASK;
uint32_t field_id = tag >> 3;
ptr += consumed;
// Count if this is the target field
if (field_id == target_field_id) {
count++;
}
// Skip field data based on wire type
switch (field_type) {
case WIRE_TYPE_VARINT: { // VarInt - parse and skip
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
if (!res.has_value()) {
return count; // Invalid data, return what we have
}
ptr += consumed;
break;
}
case WIRE_TYPE_LENGTH_DELIMITED: { // Length-delimited - parse length and skip data
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
if (!res.has_value()) {
return count;
}
uint32_t field_length = res->as_uint32();
ptr += consumed;
if (ptr + field_length > end) {
return count; // Out of bounds
}
ptr += field_length;
break;
}
case WIRE_TYPE_FIXED32: { // 32-bit - skip 4 bytes
if (ptr + 4 > end) {
return count;
}
ptr += 4;
break;
}
default:
// Unknown wire type, can't continue
return count;
}
}
return count;
}
void ProtoDecodableMessage::decode(const uint8_t *buffer, size_t length) {
const uint8_t *ptr = buffer;
const uint8_t *end = buffer + length;
while (ptr < end) {
uint32_t i = 0;
bool error = false;
while (i < length) {
uint32_t consumed;
// Parse field header
auto res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
auto res = ProtoVarInt::parse(&buffer[i], length - i, &consumed);
if (!res.has_value()) {
ESP_LOGV(TAG, "Invalid field start at offset %ld", (long) (ptr - buffer));
return;
ESP_LOGV(TAG, "Invalid field start at %" PRIu32, i);
break;
}
uint32_t tag = res->as_uint32();
uint32_t field_type = tag & WIRE_TYPE_MASK;
uint32_t field_id = tag >> 3;
ptr += consumed;
uint32_t field_type = (res->as_uint32()) & 0b111;
uint32_t field_id = (res->as_uint32()) >> 3;
i += consumed;
switch (field_type) {
case WIRE_TYPE_VARINT: { // VarInt
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
case 0: { // VarInt
res = ProtoVarInt::parse(&buffer[i], length - i, &consumed);
if (!res.has_value()) {
ESP_LOGV(TAG, "Invalid VarInt at offset %ld", (long) (ptr - buffer));
return;
ESP_LOGV(TAG, "Invalid VarInt at %" PRIu32, i);
error = true;
break;
}
if (!this->decode_varint(field_id, *res)) {
ESP_LOGV(TAG, "Cannot decode VarInt field %" PRIu32 " with value %" PRIu32 "!", field_id, res->as_uint32());
}
ptr += consumed;
i += consumed;
break;
}
case WIRE_TYPE_LENGTH_DELIMITED: { // Length-delimited
res = ProtoVarInt::parse(ptr, end - ptr, &consumed);
case 2: { // Length-delimited
res = ProtoVarInt::parse(&buffer[i], length - i, &consumed);
if (!res.has_value()) {
ESP_LOGV(TAG, "Invalid Length Delimited at offset %ld", (long) (ptr - buffer));
return;
ESP_LOGV(TAG, "Invalid Length Delimited at %" PRIu32, i);
error = true;
break;
}
uint32_t field_length = res->as_uint32();
ptr += consumed;
if (ptr + field_length > end) {
ESP_LOGV(TAG, "Out-of-bounds Length Delimited at offset %ld", (long) (ptr - buffer));
return;
i += consumed;
if (field_length > length - i) {
ESP_LOGV(TAG, "Out-of-bounds Length Delimited at %" PRIu32, i);
error = true;
break;
}
if (!this->decode_length(field_id, ProtoLengthDelimited(ptr, field_length))) {
if (!this->decode_length(field_id, ProtoLengthDelimited(&buffer[i], field_length))) {
ESP_LOGV(TAG, "Cannot decode Length Delimited field %" PRIu32 "!", field_id);
}
ptr += field_length;
i += field_length;
break;
}
case WIRE_TYPE_FIXED32: { // 32-bit
if (ptr + 4 > end) {
ESP_LOGV(TAG, "Out-of-bounds Fixed32-bit at offset %ld", (long) (ptr - buffer));
return;
case 5: { // 32-bit
if (length - i < 4) {
ESP_LOGV(TAG, "Out-of-bounds Fixed32-bit at %" PRIu32, i);
error = true;
break;
}
uint32_t val = encode_uint32(ptr[3], ptr[2], ptr[1], ptr[0]);
uint32_t val = encode_uint32(buffer[i + 3], buffer[i + 2], buffer[i + 1], buffer[i]);
if (!this->decode_32bit(field_id, Proto32Bit(val))) {
ESP_LOGV(TAG, "Cannot decode 32-bit field %" PRIu32 " with value %" PRIu32 "!", field_id, val);
}
ptr += 4;
i += 4;
break;
}
default:
ESP_LOGV(TAG, "Invalid field type %u at offset %ld", field_type, (long) (ptr - buffer));
return;
ESP_LOGV(TAG, "Invalid field type at %" PRIu32, i);
error = true;
break;
}
if (error) {
break;
}
}
}
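Both decode loops above rely on the protobuf tag layout: the low three bits of every field tag select the wire type, and the remaining bits carry the field id. A minimal sketch, independent of the ESPHome classes (see https://protobuf.dev/programming-guides/encoding/):

```cpp
#include <cstdint>

constexpr uint32_t make_tag(uint32_t field_id, uint32_t wire_type) { return (field_id << 3) | (wire_type & 0b111); }
constexpr uint32_t tag_field_id(uint32_t tag) { return tag >> 3; }
constexpr uint32_t tag_wire_type(uint32_t tag) { return tag & 0b111; }

// Field 1 with wire type 2 (length-delimited) encodes as tag byte 0x0A.
static_assert(make_tag(1, 2) == 0x0A, "field 1, length-delimited");
static_assert(tag_field_id(0x0A) == 1 && tag_wire_type(0x0A) == 2, "round trip");
```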

View File

@@ -15,13 +15,6 @@
namespace esphome::api {
// Protocol Buffer wire type constants
// See https://protobuf.dev/programming-guides/encoding/#structure
constexpr uint8_t WIRE_TYPE_VARINT = 0; // int32, int64, uint32, uint64, sint32, sint64, bool, enum
constexpr uint8_t WIRE_TYPE_LENGTH_DELIMITED = 2; // string, bytes, embedded messages, packed repeated fields
constexpr uint8_t WIRE_TYPE_FIXED32 = 5; // fixed32, sfixed32, float
constexpr uint8_t WIRE_TYPE_MASK = 0b111; // Mask to extract wire type from tag
// Helper functions for ZigZag encoding/decoding
inline constexpr uint32_t encode_zigzag32(int32_t value) {
return (static_cast<uint32_t>(value) << 1) ^ (static_cast<uint32_t>(value >> 31));
@@ -131,34 +124,6 @@ class ProtoVarInt {
// with ZigZag encoding
return decode_zigzag64(this->value_);
}
/**
* Encode the varint value to a pre-allocated buffer without bounds checking.
*
* @param buffer The pre-allocated buffer to write the encoded varint to
* @param len The size of the buffer in bytes
*
* @note The caller is responsible for ensuring the buffer is large enough
* to hold the encoded value. Use ProtoSize::varint() to calculate
* the exact size needed before calling this method.
* @note No bounds checking is performed for performance reasons.
*/
void encode_to_buffer_unchecked(uint8_t *buffer, size_t len) {
uint64_t val = this->value_;
if (val <= 0x7F) {
buffer[0] = val;
return;
}
size_t i = 0;
while (val && i < len) {
uint8_t temp = val & 0x7F;
val >>= 7;
if (val) {
buffer[i++] = temp | 0x80;
} else {
buffer[i++] = temp;
}
}
}
void encode(std::vector<uint8_t> &out) {
uint64_t val = this->value_;
if (val <= 0x7F) {
@@ -189,10 +154,6 @@ class ProtoLengthDelimited {
explicit ProtoLengthDelimited(const uint8_t *value, size_t length) : value_(value), length_(length) {}
std::string as_string() const { return std::string(reinterpret_cast<const char *>(this->value_), this->length_); }
// Direct access to raw data without string allocation
const uint8_t *data() const { return this->value_; }
size_t size() const { return this->length_; }
/**
* Decode the length-delimited data into an existing ProtoDecodableMessage instance.
*
@@ -248,7 +209,7 @@ class ProtoWriteBuffer {
* Following https://protobuf.dev/programming-guides/encoding/#structure
*/
void encode_field_raw(uint32_t field_id, uint32_t type) {
uint32_t val = (field_id << 3) | (type & WIRE_TYPE_MASK);
uint32_t val = (field_id << 3) | (type & 0b111);
this->encode_varint_raw(val);
}
void encode_string(uint32_t field_id, const char *string, size_t len, bool force = false) {
@@ -341,6 +302,28 @@ class ProtoWriteBuffer {
std::vector<uint8_t> *buffer_;
};
/**
* @brief Encode a uint16_t value as a varint directly to a buffer without bounds checking
*
* @param buffer The pre-allocated buffer to write the encoded varint to
* @param value The uint16_t value to encode (0-65535)
*
* @note The caller is responsible for ensuring the buffer is large enough (max 3 bytes for uint16_t)
* @note No bounds checking is performed for performance reasons
*/
inline void encode_varint_unchecked(uint8_t *buffer, uint16_t value) {
if (value < 128) {
buffer[0] = value;
} else if (value < 16384) {
buffer[0] = (value & 0x7F) | 0x80;
buffer[1] = value >> 7;
} else {
buffer[0] = (value & 0x7F) | 0x80;
buffer[1] = ((value >> 7) & 0x7F) | 0x80;
buffer[2] = value >> 14;
}
}
// Forward declaration
class ProtoSize;
@@ -361,18 +344,7 @@ class ProtoMessage {
// Base class for messages that support decoding
class ProtoDecodableMessage : public ProtoMessage {
public:
virtual void decode(const uint8_t *buffer, size_t length);
/**
* Count occurrences of a repeated field in a protobuf buffer.
* This is a lightweight scan that only parses tags and skips field data.
*
* @param buffer Pointer to the protobuf buffer
* @param length Length of the buffer in bytes
* @param target_field_id The field ID to count
* @return Number of times the field appears in the buffer
*/
static uint32_t count_repeated_field(const uint8_t *buffer, size_t length, uint32_t target_field_id);
void decode(const uint8_t *buffer, size_t length);
protected:
virtual bool decode_varint(uint32_t field_id, ProtoVarInt value) { return false; }
@@ -408,6 +380,33 @@ class ProtoSize {
uint32_t get_size() const { return total_size_; }
/**
* @brief Calculates the size in bytes needed to encode a uint8_t value as a varint
*
* @param value The uint8_t value to calculate size for
* @return The number of bytes needed to encode the value (1 or 2)
*/
static constexpr uint8_t varint(uint8_t value) {
// For uint8_t (0-255), we need at most 2 bytes
return (value < 128) ? 1 : 2;
}
/**
* @brief Calculates the size in bytes needed to encode a uint16_t value as a varint
*
* @param value The uint16_t value to calculate size for
* @return The number of bytes needed to encode the value (1-3)
*/
static constexpr uint8_t varint(uint16_t value) {
// For uint16_t (0-65535), we need at most 3 bytes
if (value < 128)
return 1; // 7 bits
else if (value < 16384)
return 2; // 14 bits
else
return 3; // 15-16 bits
}
/**
* @brief Calculates the size in bytes needed to encode a uint32_t value as a varint
*
@@ -417,11 +416,9 @@ class ProtoSize {
static constexpr uint32_t varint(uint32_t value) {
// Optimized varint size calculation using leading zeros
// Each 7 bits requires one byte in the varint encoding
if (value < 128)
if (value < 128) {
return 1; // 7 bits, common case for small values
// For larger values, count bytes needed based on the position of the highest bit set
if (value < 16384) {
} else if (value < 16384) {
return 2; // 14 bits
} else if (value < 2097152) {
return 3; // 21 bits
@@ -500,7 +497,7 @@ class ProtoSize {
* @return The number of bytes needed to encode the field ID and wire type
*/
static constexpr uint32_t field(uint32_t field_id, uint32_t type) {
uint32_t tag = (field_id << 3) | (type & WIRE_TYPE_MASK);
uint32_t tag = (field_id << 3) | (type & 0b111);
return varint(tag);
}
@@ -767,29 +764,13 @@ class ProtoSize {
template<typename MessageType>
inline void add_repeated_message(uint32_t field_id_size, const std::vector<MessageType> &messages) {
// Skip if the vector is empty
if (!messages.empty()) {
// Use the force version for all messages in the repeated field
for (const auto &message : messages) {
add_message_object_force(field_id_size, message);
}
if (messages.empty()) {
return;
}
}
/**
* @brief Calculates and adds the sizes of all messages in a repeated field to the total message size (FixedVector
* version)
*
* @tparam MessageType The type of the nested messages in the FixedVector
* @param messages FixedVector of message objects
*/
template<typename MessageType>
inline void add_repeated_message(uint32_t field_id_size, const FixedVector<MessageType> &messages) {
// Skip if the fixed vector is empty
if (!messages.empty()) {
// Use the force version for all messages in the repeated field
for (const auto &message : messages) {
add_message_object_force(field_id_size, message);
}
// Use the force version for all messages in the repeated field
for (const auto &message : messages) {
add_message_object_force(field_id_size, message);
}
}
};
@@ -811,7 +792,7 @@ inline void ProtoWriteBuffer::encode_message(uint32_t field_id, const ProtoMessa
this->buffer_->resize(this->buffer_->size() + varint_length_bytes);
// Write the length varint directly
ProtoVarInt(msg_length_bytes).encode_to_buffer_unchecked(this->buffer_->data() + begin, varint_length_bytes);
encode_varint_unchecked(this->buffer_->data() + begin, static_cast<uint16_t>(msg_length_bytes));
// Now encode the message content - it will append to the buffer
value.encode(*this);
@@ -865,7 +846,7 @@ class ProtoService {
}
// Authentication helper methods
inline bool check_connection_setup_() {
bool check_connection_setup_() {
if (!this->is_connection_setup()) {
this->on_no_setup_connection();
return false;
@@ -873,7 +854,7 @@ class ProtoService {
return true;
}
inline bool check_authenticated_() {
bool check_authenticated_() {
#ifdef USE_API_PASSWORD
if (!this->check_connection_setup_()) {
return false;
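Since this compare revolves around varint handling, here is a hedged sketch of the LEB128-style encoding that `encode_varint_unchecked()` and `ProtoSize::varint()` above implement; the helper name is assumed for illustration and is not part of the API:

```cpp
#include <cstddef>
#include <cstdint>

// Seven data bits per output byte, MSB set on every byte except the last.
size_t encode_varint(uint8_t *out, uint64_t value) {
  size_t i = 0;
  while (value >= 0x80) {
    out[i++] = static_cast<uint8_t>(value & 0x7F) | 0x80;  // continuation bit set
    value >>= 7;
  }
  out[i++] = static_cast<uint8_t>(value);  // final byte, continuation bit clear
  return i;  // e.g. 300 encodes as {0xAC, 0x02}, matching ProtoSize::varint(300) == 2
}
```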

View File

@@ -12,16 +12,16 @@ template<> int32_t get_execute_arg_value<int32_t>(const ExecuteServiceArgument &
template<> float get_execute_arg_value<float>(const ExecuteServiceArgument &arg) { return arg.float_; }
template<> std::string get_execute_arg_value<std::string>(const ExecuteServiceArgument &arg) { return arg.string_; }
template<> std::vector<bool> get_execute_arg_value<std::vector<bool>>(const ExecuteServiceArgument &arg) {
return std::vector<bool>(arg.bool_array.begin(), arg.bool_array.end());
return arg.bool_array;
}
template<> std::vector<int32_t> get_execute_arg_value<std::vector<int32_t>>(const ExecuteServiceArgument &arg) {
return std::vector<int32_t>(arg.int_array.begin(), arg.int_array.end());
return arg.int_array;
}
template<> std::vector<float> get_execute_arg_value<std::vector<float>>(const ExecuteServiceArgument &arg) {
return std::vector<float>(arg.float_array.begin(), arg.float_array.end());
return arg.float_array;
}
template<> std::vector<std::string> get_execute_arg_value<std::vector<std::string>>(const ExecuteServiceArgument &arg) {
return std::vector<std::string>(arg.string_array.begin(), arg.string_array.end());
return arg.string_array;
}
template<> enums::ServiceArgType to_service_arg_type<bool>() { return enums::SERVICE_ARG_TYPE_BOOL; }

View File

@@ -35,9 +35,9 @@ template<typename... Ts> class UserServiceBase : public UserServiceDescriptor {
msg.set_name(StringRef(this->name_));
msg.key = this->key_;
std::array<enums::ServiceArgType, sizeof...(Ts)> arg_types = {to_service_arg_type<Ts>()...};
msg.args.init(sizeof...(Ts));
for (size_t i = 0; i < sizeof...(Ts); i++) {
auto &arg = msg.args.emplace_back();
for (int i = 0; i < sizeof...(Ts); i++) {
msg.args.emplace_back();
auto &arg = msg.args.back();
arg.type = arg_types[i];
arg.set_name(StringRef(this->arg_names_[i]));
}
@@ -55,7 +55,7 @@ template<typename... Ts> class UserServiceBase : public UserServiceDescriptor {
protected:
virtual void execute(Ts... x) = 0;
template<typename ArgsContainer, int... S> void execute_(const ArgsContainer &args, seq<S...> type) {
template<int... S> void execute_(std::vector<ExecuteServiceArgument> args, seq<S...> type) {
this->execute((get_execute_arg_value<Ts>(args[S]))...);
}
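`execute_()` above unpacks a runtime argument container into a typed `execute(Ts...)` call via an index sequence. A rough standalone sketch using `std::index_sequence`; ESPHome's generated services use their own `seq<>` helper and argument container:

```cpp
#include <iostream>
#include <string>
#include <tuple>
#include <utility>

void execute(bool a, int b, const std::string &c) { std::cout << a << ' ' << b << ' ' << c << '\n'; }

template<typename Tuple, std::size_t... S> void execute_unpacked(const Tuple &args, std::index_sequence<S...>) {
  // Expands to execute(std::get<0>(args), std::get<1>(args), std::get<2>(args))
  execute(std::get<S>(args)...);
}

int main() {
  std::tuple<bool, int, std::string> args{true, 42, "hello"};
  execute_unpacked(args, std::make_index_sequence<3>{});
  return 0;
}
```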

View File

@@ -2,7 +2,6 @@ import esphome.codegen as cg
from esphome.components import i2c, sensor
import esphome.config_validation as cv
from esphome.const import (
CONF_CLEAR,
CONF_GAIN,
CONF_ID,
DEVICE_CLASS_ILLUMINANCE,
@@ -30,6 +29,7 @@ CONF_F5 = "f5"
CONF_F6 = "f6"
CONF_F7 = "f7"
CONF_F8 = "f8"
CONF_CLEAR = "clear"
CONF_NIR = "nir"
UNIT_COUNTS = "#"

View File

@@ -8,7 +8,7 @@ from esphome.const import (
PLATFORM_LN882X,
PLATFORM_RTL87XX,
)
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.core import CORE, coroutine_with_priority
CODEOWNERS = ["@esphome/core"]
@@ -27,7 +27,7 @@ CONFIG_SCHEMA = cv.All(
)
@coroutine_with_priority(CoroPriority.NETWORK_TRANSPORT)
@coroutine_with_priority(200.0)
async def to_code(config):
if CORE.is_esp32 or CORE.is_libretiny:
# https://github.com/ESP32Async/AsyncTCP

View File

@@ -16,7 +16,6 @@ from esphome.const import (
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_POWER_FACTOR,
DEVICE_CLASS_REACTIVE_POWER,
DEVICE_CLASS_VOLTAGE,
ICON_CURRENT_AC,
ICON_LIGHTBULB,
@@ -79,7 +78,6 @@ CONFIG_SCHEMA = (
unit_of_measurement=UNIT_VOLT_AMPS_REACTIVE,
icon=ICON_LIGHTBULB,
accuracy_decimals=2,
device_class=DEVICE_CLASS_REACTIVE_POWER,
state_class=STATE_CLASS_MEASUREMENT,
),
cv.Optional(CONF_POWER_FACTOR): sensor.sensor_schema(

View File

@@ -382,15 +382,20 @@ float ATM90E32Component::get_setup_priority() const { return setup_priority::IO;
// R/C registers can only be cleared after the LastSPIData register is updated (register 78H)
// Peak-detect period: 05H. Bits 15:8 are PeakDet_period in ms; bits 7:0 are Sag_period
// Default is 143FH (20ms, 63ms)
uint16_t ATM90E32Component::read16_(uint16_t a_register) {
this->enable();
delay_microseconds_safe(1); // min delay between CS low and first SCK is 200ns - 1us is plenty
uint16_t ATM90E32Component::read16_transaction_(uint16_t a_register) {
uint8_t addrh = (1 << 7) | ((a_register >> 8) & 0x03);
uint8_t addrl = (a_register & 0xFF);
uint8_t data[4] = {addrh, addrl, 0x00, 0x00};
this->transfer_array(data, 4);
uint16_t output = encode_uint16(data[2], data[3]);
ESP_LOGVV(TAG, "read16_ 0x%04" PRIX16 " output 0x%04" PRIX16, a_register, output);
return output;
}
uint16_t ATM90E32Component::read16_(uint16_t a_register) {
this->enable();
delay_microseconds_safe(1); // min delay between CS low and first SCK is 200ns - 1us is plenty
uint16_t output = this->read16_transaction_(a_register);
delay_microseconds_safe(1); // allow the last clock to propagate before releasing CS
this->disable();
delay_microseconds_safe(1); // meet minimum CS high time before next transaction
@@ -398,8 +403,14 @@ uint16_t ATM90E32Component::read16_(uint16_t a_register) {
}
int ATM90E32Component::read32_(uint16_t addr_h, uint16_t addr_l) {
const uint16_t val_h = this->read16_(addr_h);
const uint16_t val_l = this->read16_(addr_l);
this->enable();
delay_microseconds_safe(1);
const uint16_t val_h = this->read16_transaction_(addr_h);
delay_microseconds_safe(1);
const uint16_t val_l = this->read16_transaction_(addr_l);
delay_microseconds_safe(1);
this->disable();
delay_microseconds_safe(1);
const int32_t val = (val_h << 16) | val_l;
ESP_LOGVV(TAG,

View File

@@ -140,6 +140,7 @@ class ATM90E32Component : public PollingComponent,
number::Number *ref_currents_[3]{nullptr, nullptr, nullptr};
#endif
uint16_t read16_(uint16_t a_register);
uint16_t read16_transaction_(uint16_t a_register);
int read32_(uint16_t addr_h, uint16_t addr_l);
void write16_(uint16_t a_register, uint16_t val, bool validate = true);
float get_local_phase_voltage_(uint8_t phase);

View File

@@ -17,12 +17,10 @@ from esphome.const import (
CONF_REACTIVE_POWER,
CONF_REVERSE_ACTIVE_ENERGY,
CONF_VOLTAGE,
DEVICE_CLASS_APPARENT_POWER,
DEVICE_CLASS_CURRENT,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_POWER_FACTOR,
DEVICE_CLASS_REACTIVE_POWER,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_VOLTAGE,
ENTITY_CATEGORY_DIAGNOSTIC,
@@ -102,13 +100,13 @@ ATM90E32_PHASE_SCHEMA = cv.Schema(
unit_of_measurement=UNIT_VOLT_AMPS_REACTIVE,
icon=ICON_LIGHTBULB,
accuracy_decimals=2,
device_class=DEVICE_CLASS_REACTIVE_POWER,
device_class=DEVICE_CLASS_POWER,
state_class=STATE_CLASS_MEASUREMENT,
),
cv.Optional(CONF_APPARENT_POWER): sensor.sensor_schema(
unit_of_measurement=UNIT_VOLT_AMPS,
accuracy_decimals=2,
device_class=DEVICE_CLASS_APPARENT_POWER,
device_class=DEVICE_CLASS_POWER,
state_class=STATE_CLASS_MEASUREMENT,
),
cv.Optional(CONF_POWER_FACTOR): sensor.sensor_schema(

View File

@@ -165,4 +165,4 @@ def final_validate_audio_schema(
async def to_code(config):
cg.add_library("esphome/esp-audio-libs", "2.0.1")
cg.add_library("esphome/esp-audio-libs", "1.1.4")

View File

@@ -57,7 +57,7 @@ const char *audio_file_type_to_string(AudioFileType file_type) {
void scale_audio_samples(const int16_t *audio_samples, int16_t *output_buffer, int16_t scale_factor,
size_t samples_to_scale) {
// Note the assembly dsps_mulc function has audio glitches if the input and output buffers are the same.
for (size_t i = 0; i < samples_to_scale; i++) {
for (int i = 0; i < samples_to_scale; i++) {
int32_t acc = (int32_t) audio_samples[i] * (int32_t) scale_factor;
output_buffer[i] = (int16_t) (acc >> 15);
}
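`scale_audio_samples()` above performs Q15 fixed-point scaling: the 16-bit scale factor is treated as a fraction with 15 fractional bits, so the 32-bit product is shifted right by 15. A minimal sketch:

```cpp
#include <cstdint>

// A scale factor of 0x4000 (16384) represents 0.5 in Q15, so the output is half the input amplitude.
int16_t scale_q15(int16_t sample, int16_t scale_factor) {
  int32_t acc = static_cast<int32_t>(sample) * static_cast<int32_t>(scale_factor);
  return static_cast<int16_t>(acc >> 15);  // e.g. scale_q15(1000, 16384) == 500
}
```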

View File

@@ -229,18 +229,18 @@ FileDecoderState AudioDecoder::decode_flac_() {
auto result = this->flac_decoder_->read_header(this->input_transfer_buffer_->get_buffer_start(),
this->input_transfer_buffer_->available());
if (result > esp_audio_libs::flac::FLAC_DECODER_HEADER_OUT_OF_DATA) {
// Serious error reading FLAC header; there is no recovery
if (result == esp_audio_libs::flac::FLAC_DECODER_HEADER_OUT_OF_DATA) {
return FileDecoderState::POTENTIALLY_FAILED;
}
if (result != esp_audio_libs::flac::FLAC_DECODER_SUCCESS) {
// Couldn't read FLAC header
return FileDecoderState::FAILED;
}
size_t bytes_consumed = this->flac_decoder_->get_bytes_index();
this->input_transfer_buffer_->decrease_buffer_length(bytes_consumed);
if (result == esp_audio_libs::flac::FLAC_DECODER_HEADER_OUT_OF_DATA) {
return FileDecoderState::MORE_TO_PROCESS;
}
// Reallocate the output transfer buffer to the smallest necessary size
this->free_buffer_required_ = flac_decoder_->get_output_buffer_size_bytes();
if (!this->output_transfer_buffer_->reallocate(this->free_buffer_required_)) {
@@ -256,9 +256,9 @@ FileDecoderState AudioDecoder::decode_flac_() {
}
uint32_t output_samples = 0;
auto result = this->flac_decoder_->decode_frame(this->input_transfer_buffer_->get_buffer_start(),
this->input_transfer_buffer_->available(),
this->output_transfer_buffer_->get_buffer_end(), &output_samples);
auto result = this->flac_decoder_->decode_frame(
this->input_transfer_buffer_->get_buffer_start(), this->input_transfer_buffer_->available(),
reinterpret_cast<int16_t *>(this->output_transfer_buffer_->get_buffer_end()), &output_samples);
if (result == esp_audio_libs::flac::FLAC_DECODER_ERROR_OUT_OF_DATA) {
// Not an issue, just needs more data that we'll get next time.

View File

@@ -2,7 +2,7 @@ from esphome import automation
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_MIC_GAIN
from esphome.core import CoroPriority, coroutine_with_priority
from esphome.core import coroutine_with_priority
CODEOWNERS = ["@kbx81"]
IS_PLATFORM_COMPONENT = True
@@ -35,7 +35,7 @@ async def audio_adc_set_mic_gain_to_code(config, action_id, template_arg, args):
return var
@coroutine_with_priority(CoroPriority.CORE)
@coroutine_with_priority(100.0)
async def to_code(config):
cg.add_define("USE_AUDIO_ADC")
cg.add_global(audio_adc_ns.using)

View File

@@ -3,7 +3,7 @@ from esphome.automation import maybe_simple_id
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import CONF_ID, CONF_VOLUME
from esphome.core import CoroPriority, coroutine_with_priority
from esphome.core import coroutine_with_priority
CODEOWNERS = ["@kbx81"]
IS_PLATFORM_COMPONENT = True
@@ -51,7 +51,7 @@ async def audio_dac_set_volume_to_code(config, action_id, template_arg, args):
return var
@coroutine_with_priority(CoroPriority.CORE)
@coroutine_with_priority(100.0)
async def to_code(config):
cg.add_define("USE_AUDIO_DAC")
cg.add_global(audio_dac_ns.using)

View File

@@ -12,7 +12,7 @@ constexpr static const uint8_t AXS_READ_TOUCHPAD[11] = {0xb5, 0xab, 0xa5, 0x5a,
#define ERROR_CHECK(err) \
if ((err) != i2c::ERROR_OK) { \
this->status_set_warning(LOG_STR("Failed to communicate")); \
this->status_set_warning("Failed to communicate"); \
return; \
}
@@ -41,7 +41,7 @@ void AXS15231Touchscreen::update_touches() {
i2c::ErrorCode err;
uint8_t data[8]{};
err = this->write(AXS_READ_TOUCHPAD, sizeof(AXS_READ_TOUCHPAD));
err = this->write(AXS_READ_TOUCHPAD, sizeof(AXS_READ_TOUCHPAD), false);
ERROR_CHECK(err);
err = this->read(data, sizeof(data));
ERROR_CHECK(err);

View File

@@ -493,7 +493,7 @@ void BedJetHub::dump_config() {
" ble_client.app_id: %d\n"
" ble_client.conn_id: %d",
this->get_name().c_str(), this->parent()->app_id, this->parent()->get_conn_id());
LOG_UPDATE_INTERVAL(this);
LOG_UPDATE_INTERVAL(this)
ESP_LOGCONFIG(TAG, " Child components (%d):", this->children_.size());
for (auto *child : this->children_) {
ESP_LOGCONFIG(TAG, " - %s", child->describe().c_str());

View File

@@ -1,54 +0,0 @@
#include "esphome/core/log.h"
#include "bh1900nux.h"
namespace esphome {
namespace bh1900nux {
static const char *const TAG = "bh1900nux.sensor";
// I2C Registers
static const uint8_t TEMPERATURE_REG = 0x00;
static const uint8_t CONFIG_REG = 0x01; // Not used or supported yet
static const uint8_t TEMPERATURE_LOW_REG = 0x02; // Not used or supported yet
static const uint8_t TEMPERATURE_HIGH_REG = 0x03; // Not used or supported yet
static const uint8_t SOFT_RESET_REG = 0x04;
// I2C Command payloads
static const uint8_t SOFT_RESET_PAYLOAD = 0x01; // Soft Reset value
static const float SENSOR_RESOLUTION = 0.0625f; // Sensor resolution per bit in degrees celsius
void BH1900NUXSensor::setup() {
// Initialize I2C device
i2c::ErrorCode result_code =
this->write_register(SOFT_RESET_REG, &SOFT_RESET_PAYLOAD, 1); // Software Reset to check communication
if (result_code != i2c::ERROR_OK) {
this->mark_failed(ESP_LOG_MSG_COMM_FAIL);
return;
}
}
void BH1900NUXSensor::update() {
uint8_t temperature_raw[2];
if (this->read_register(TEMPERATURE_REG, temperature_raw, 2) != i2c::ERROR_OK) {
ESP_LOGE(TAG, ESP_LOG_MSG_COMM_FAIL);
return;
}
// Combined raw value, unsigned and unaligned 16 bit
// Temperature is represented in just 12 bits, shift needed
int16_t raw_temperature_register_value = encode_uint16(temperature_raw[0], temperature_raw[1]);
raw_temperature_register_value >>= 4;
float temperature_value = raw_temperature_register_value * SENSOR_RESOLUTION; // Apply sensor resolution
this->publish_state(temperature_value);
}
void BH1900NUXSensor::dump_config() {
LOG_SENSOR("", "BH1900NUX", this);
LOG_I2C_DEVICE(this);
LOG_UPDATE_INTERVAL(this);
}
} // namespace bh1900nux
} // namespace esphome
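The removed `update()` above converts two raw I2C bytes into a signed, left-aligned 12-bit reading at 0.0625 degC per LSB. A hedged standalone sketch of that conversion (the helper name is illustrative):

```cpp
#include <cstdint>

float bh1900nux_to_celsius(uint8_t msb, uint8_t lsb) {
  int16_t raw = static_cast<int16_t>((static_cast<uint16_t>(msb) << 8) | lsb);
  raw >>= 4;             // drop the 4 unused low bits; arithmetic shift keeps the sign
  return raw * 0.0625f;  // e.g. bytes 0x19, 0x00 -> raw 400 -> 25.0 degC
}
```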

View File

@@ -1,18 +0,0 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/i2c/i2c.h"
namespace esphome {
namespace bh1900nux {
class BH1900NUXSensor : public sensor::Sensor, public PollingComponent, public i2c::I2CDevice {
public:
void setup() override;
void update() override;
void dump_config() override;
};
} // namespace bh1900nux
} // namespace esphome

View File

@@ -1,34 +0,0 @@
import esphome.codegen as cg
from esphome.components import i2c, sensor
import esphome.config_validation as cv
from esphome.const import (
DEVICE_CLASS_TEMPERATURE,
STATE_CLASS_MEASUREMENT,
UNIT_CELSIUS,
)
DEPENDENCIES = ["i2c"]
CODEOWNERS = ["@B48D81EFCC"]
sensor_ns = cg.esphome_ns.namespace("bh1900nux")
BH1900NUXSensor = sensor_ns.class_(
"BH1900NUXSensor", cg.PollingComponent, i2c.I2CDevice
)
CONFIG_SCHEMA = (
sensor.sensor_schema(
BH1900NUXSensor,
accuracy_decimals=1,
unit_of_measurement=UNIT_CELSIUS,
device_class=DEVICE_CLASS_TEMPERATURE,
state_class=STATE_CLASS_MEASUREMENT,
)
.extend(cv.polling_component_schema("60s"))
.extend(i2c.i2c_device_schema(0x48))
)
async def to_code(config):
var = await sensor.new_sensor(config)
await cg.register_component(var, config)
await i2c.register_i2c_device(var, config)

View File

@@ -59,7 +59,7 @@ from esphome.const import (
DEVICE_CLASS_VIBRATION,
DEVICE_CLASS_WINDOW,
)
from esphome.core import CORE, CoroPriority, coroutine_with_priority
from esphome.core import CORE, coroutine_with_priority
from esphome.core.entity_helpers import entity_duplicate_validator, setup_entity
from esphome.cpp_generator import MockObjClass
from esphome.util import Registry
@@ -652,7 +652,7 @@ async def binary_sensor_is_off_to_code(config, condition_id, template_arg, args)
return cg.new_Pvariable(condition_id, template_arg, paren, False)
@coroutine_with_priority(CoroPriority.CORE)
@coroutine_with_priority(100.0)
async def to_code(config):
cg.add_global(binary_sensor_ns.using)

View File

@@ -7,19 +7,6 @@ namespace binary_sensor {
static const char *const TAG = "binary_sensor";
// Function implementation of LOG_BINARY_SENSOR macro to reduce code size
void log_binary_sensor(const char *tag, const char *prefix, const char *type, BinarySensor *obj) {
if (obj == nullptr) {
return;
}
ESP_LOGCONFIG(tag, "%s%s '%s'", prefix, type, obj->get_name().c_str());
if (!obj->get_device_class_ref().empty()) {
ESP_LOGCONFIG(tag, "%s Device Class: '%s'", prefix, obj->get_device_class_ref().c_str());
}
}
void BinarySensor::publish_state(bool new_state) {
if (this->filter_list_ == nullptr) {
this->send_state_internal(new_state);

View File

@@ -10,10 +10,13 @@ namespace esphome {
namespace binary_sensor {
class BinarySensor;
void log_binary_sensor(const char *tag, const char *prefix, const char *type, BinarySensor *obj);
#define LOG_BINARY_SENSOR(prefix, type, obj) log_binary_sensor(TAG, prefix, LOG_STR_LITERAL(type), obj)
#define LOG_BINARY_SENSOR(prefix, type, obj) \
if ((obj) != nullptr) { \
ESP_LOGCONFIG(TAG, "%s%s '%s'", prefix, LOG_STR_LITERAL(type), (obj)->get_name().c_str()); \
if (!(obj)->get_device_class().empty()) { \
ESP_LOGCONFIG(TAG, "%s Device Class: '%s'", prefix, (obj)->get_device_class().c_str()); \
} \
}
#define SUB_BINARY_SENSOR(name) \
protected: \

View File

@@ -97,10 +97,10 @@ void BL0906::handle_actions_() {
return;
}
ActionCallbackFuncPtr ptr_func = nullptr;
for (size_t i = 0; i < this->action_queue_.size(); i++) {
for (int i = 0; i < this->action_queue_.size(); i++) {
ptr_func = this->action_queue_[i];
if (ptr_func) {
ESP_LOGI(TAG, "HandleActionCallback[%zu]", i);
ESP_LOGI(TAG, "HandleActionCallback[%d]", i);
(this->*ptr_func)();
}
}

View File

@@ -1,6 +1 @@
import esphome.codegen as cg
CODEOWNERS = ["@tobias-", "@dan-s-github"]
CONF_BL0940_ID = "bl0940_id"
bl0940_ns = cg.esphome_ns.namespace("bl0940")
CODEOWNERS = ["@tobias-"]

View File

@@ -7,26 +7,28 @@ namespace bl0940 {
static const char *const TAG = "bl0940";
static const uint8_t BL0940_READ_COMMAND = 0x50; // 0x58 according to documentation
static const uint8_t BL0940_FULL_PACKET = 0xAA;
static const uint8_t BL0940_PACKET_HEADER = 0x55; // 0x58 in the English datasheet, but 0x55 in the Chinese one
static const uint8_t BL0940_PACKET_HEADER = 0x55; // 0x58 according to documentation
static const uint8_t BL0940_WRITE_COMMAND = 0xA0; // 0xA8 according to documentation
static const uint8_t BL0940_REG_I_FAST_RMS_CTRL = 0x10;
static const uint8_t BL0940_REG_MODE = 0x18;
static const uint8_t BL0940_REG_SOFT_RESET = 0x19;
static const uint8_t BL0940_REG_USR_WRPROT = 0x1A;
static const uint8_t BL0940_REG_TPS_CTRL = 0x1B;
static const uint8_t BL0940_INIT[5][5] = {
const uint8_t BL0940_INIT[5][6] = {
// Reset to default
{BL0940_REG_SOFT_RESET, 0x5A, 0x5A, 0x5A, 0x38},
{BL0940_WRITE_COMMAND, BL0940_REG_SOFT_RESET, 0x5A, 0x5A, 0x5A, 0x38},
// Enable User Operation Write
{BL0940_REG_USR_WRPROT, 0x55, 0x00, 0x00, 0xF0},
{BL0940_WRITE_COMMAND, BL0940_REG_USR_WRPROT, 0x55, 0x00, 0x00, 0xF0},
// 0x0100 = CF_UNABLE energy pulse, AC_FREQ_SEL 50Hz, RMS_UPDATE_SEL 800mS
{BL0940_REG_MODE, 0x00, 0x10, 0x00, 0x37},
{BL0940_WRITE_COMMAND, BL0940_REG_MODE, 0x00, 0x10, 0x00, 0x37},
// 0x47FF = Over-current and leakage alarm on, Automatic temperature measurement, Interval 100mS
{BL0940_REG_TPS_CTRL, 0xFF, 0x47, 0x00, 0xFE},
{BL0940_WRITE_COMMAND, BL0940_REG_TPS_CTRL, 0xFF, 0x47, 0x00, 0xFE},
// 0x181C = Half cycle, Fast RMS threshold 6172
{BL0940_REG_I_FAST_RMS_CTRL, 0x1C, 0x18, 0x00, 0x1B}};
{BL0940_WRITE_COMMAND, BL0940_REG_I_FAST_RMS_CTRL, 0x1C, 0x18, 0x00, 0x1B}};
void BL0940::loop() {
DataPacket buffer;
@@ -34,8 +36,8 @@ void BL0940::loop() {
return;
}
if (read_array((uint8_t *) &buffer, sizeof(buffer))) {
if (this->validate_checksum_(&buffer)) {
this->received_package_(&buffer);
if (validate_checksum(&buffer)) {
received_package_(&buffer);
}
} else {
ESP_LOGW(TAG, "Junk on wire. Throwing away partial message");
@@ -44,151 +46,35 @@ void BL0940::loop() {
}
}
bool BL0940::validate_checksum_(DataPacket *data) {
uint8_t checksum = this->read_command_;
bool BL0940::validate_checksum(const DataPacket *data) {
uint8_t checksum = BL0940_READ_COMMAND;
// Whole package but checksum
uint8_t *raw = (uint8_t *) data;
for (uint32_t i = 0; i < sizeof(*data) - 1; i++) {
checksum += raw[i];
for (uint32_t i = 0; i < sizeof(data->raw) - 1; i++) {
checksum += data->raw[i];
}
checksum ^= 0xFF;
if (checksum != data->checksum) {
ESP_LOGW(TAG, "Invalid checksum! 0x%02X != 0x%02X", checksum, data->checksum);
ESP_LOGW(TAG, "BL0940 invalid checksum! 0x%02X != 0x%02X", checksum, data->checksum);
}
return checksum == data->checksum;
}
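`validate_checksum_()` above sums the read command byte and every payload byte, inverts the sum, and compares it against the trailing checksum byte. A minimal sketch under that assumption (standalone helper, not the component's API):

```cpp
#include <cstddef>
#include <cstdint>

bool bl0940_checksum_ok(uint8_t read_cmd, const uint8_t *payload, size_t len, uint8_t expected) {
  uint8_t sum = read_cmd;
  for (size_t i = 0; i < len; i++)
    sum += payload[i];          // 8-bit wrapping sum over the packet body
  return static_cast<uint8_t>(sum ^ 0xFF) == expected;
}
```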
void BL0940::update() {
this->flush();
this->write_byte(this->read_command_);
this->write_byte(BL0940_READ_COMMAND);
this->write_byte(BL0940_FULL_PACKET);
}
void BL0940::setup() {
#ifdef USE_NUMBER
// add calibration callbacks
if (this->voltage_calibration_number_ != nullptr) {
this->voltage_calibration_number_->add_on_state_callback(
[this](float state) { this->voltage_calibration_callback_(state); });
if (this->voltage_calibration_number_->has_state()) {
this->voltage_calibration_callback_(this->voltage_calibration_number_->state);
}
}
if (this->current_calibration_number_ != nullptr) {
this->current_calibration_number_->add_on_state_callback(
[this](float state) { this->current_calibration_callback_(state); });
if (this->current_calibration_number_->has_state()) {
this->current_calibration_callback_(this->current_calibration_number_->state);
}
}
if (this->power_calibration_number_ != nullptr) {
this->power_calibration_number_->add_on_state_callback(
[this](float state) { this->power_calibration_callback_(state); });
if (this->power_calibration_number_->has_state()) {
this->power_calibration_callback_(this->power_calibration_number_->state);
}
}
if (this->energy_calibration_number_ != nullptr) {
this->energy_calibration_number_->add_on_state_callback(
[this](float state) { this->energy_calibration_callback_(state); });
if (this->energy_calibration_number_->has_state()) {
this->energy_calibration_callback_(this->energy_calibration_number_->state);
}
}
#endif
// calculate calibrated reference values
this->voltage_reference_cal_ = this->voltage_reference_ / this->voltage_cal_;
this->current_reference_cal_ = this->current_reference_ / this->current_cal_;
this->power_reference_cal_ = this->power_reference_ / this->power_cal_;
this->energy_reference_cal_ = this->energy_reference_ / this->energy_cal_;
for (auto *i : BL0940_INIT) {
this->write_byte(this->write_command_), this->write_array(i, 5);
this->write_array(i, 6);
delay(1);
}
this->flush();
}
float BL0940::calculate_power_reference_() {
// calculate power reference based on voltage and current reference
return this->voltage_reference_cal_ * this->current_reference_cal_ * 4046 / 324004 / 79931;
}
float BL0940::calculate_energy_reference_() {
// formula: 3600000 * 4046 * RL * R1 * 1000 / (1638.4 * 256) / Vref² / (R1 + R2)
// or: power_reference_ * 3600000 / (1638.4 * 256)
return this->power_reference_cal_ * 3600000 / (1638.4 * 256);
}
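With all calibration multipliers at 1 the calibrated references equal the defaults, so the defaults give a rough cross-check of both formulas (approximate figures, worked out here for illustration only):
constexpr float kPowerRefFromFormula = 33000.0f * 275000.0f * 4046 / 324004 / 79931;  // ≈ 1418, close to BL0940_PREF = 1430
constexpr float kEnergyRefFromPower = 1430.0f * 3600000 / (1638.4f * 256);            // ≈ 12274, close to BL0940_EREF = 3.6e6 / 297 ≈ 12121
The remaining gap is expected, since BL0940_EREF was measured empirically (297 J per click) rather than derived from the power reference.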
float BL0940::calculate_calibration_value_(float state) { return (100 + state) / 100; }
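The number entities report a percentage, so the multiplier is simply (100 + state) / 100; a small worked example (hypothetical value, for illustration only):
constexpr float kState = 2.5f;                      // calibration number set to +2.5 %
constexpr float kCal = (100.0f + kState) / 100.0f;  // = 1.025
// reference_cal_ = reference_ / kCal and reading = raw / reference_cal_,
// so every published reading scales up by the same 2.5 %.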
void BL0940::reset_calibration() {
#ifdef USE_NUMBER
if (this->current_calibration_number_ != nullptr && this->current_cal_ != 1) {
this->current_calibration_number_->make_call().set_value(0).perform();
}
if (this->voltage_calibration_number_ != nullptr && this->voltage_cal_ != 1) {
this->voltage_calibration_number_->make_call().set_value(0).perform();
}
if (this->power_calibration_number_ != nullptr && this->power_cal_ != 1) {
this->power_calibration_number_->make_call().set_value(0).perform();
}
if (this->energy_calibration_number_ != nullptr && this->energy_cal_ != 1) {
this->energy_calibration_number_->make_call().set_value(0).perform();
}
#endif
ESP_LOGD(TAG, "external calibration values restored to initial state");
}
void BL0940::current_calibration_callback_(float state) {
this->current_cal_ = this->calculate_calibration_value_(state);
ESP_LOGV(TAG, "update current calibration state: %f", this->current_cal_);
this->recalibrate_();
}
void BL0940::voltage_calibration_callback_(float state) {
this->voltage_cal_ = this->calculate_calibration_value_(state);
ESP_LOGV(TAG, "update voltage calibration state: %f", this->voltage_cal_);
this->recalibrate_();
}
void BL0940::power_calibration_callback_(float state) {
this->power_cal_ = this->calculate_calibration_value_(state);
ESP_LOGV(TAG, "update power calibration state: %f", this->power_cal_);
this->recalibrate_();
}
void BL0940::energy_calibration_callback_(float state) {
this->energy_cal_ = this->calculate_calibration_value_(state);
ESP_LOGV(TAG, "update energy calibration state: %f", this->energy_cal_);
this->recalibrate_();
}
void BL0940::recalibrate_() {
ESP_LOGV(TAG, "Recalibrating reference values");
this->voltage_reference_cal_ = this->voltage_reference_ / this->voltage_cal_;
this->current_reference_cal_ = this->current_reference_ / this->current_cal_;
if (this->voltage_cal_ != 1 || this->current_cal_ != 1) {
this->power_reference_ = this->calculate_power_reference_();
}
this->power_reference_cal_ = this->power_reference_ / this->power_cal_;
if (this->voltage_cal_ != 1 || this->current_cal_ != 1 || this->power_cal_ != 1) {
this->energy_reference_ = this->calculate_energy_reference_();
}
this->energy_reference_cal_ = this->energy_reference_ / this->energy_cal_;
ESP_LOGD(TAG,
"Recalibrated reference values:\n"
"Voltage: %f\n, Current: %f\n, Power: %f\n, Energy: %f\n",
this->voltage_reference_cal_, this->current_reference_cal_, this->power_reference_cal_,
this->energy_reference_cal_);
}
float BL0940::update_temp_(sensor::Sensor *sensor, uint16_le_t temperature) const {
auto tb = (float) temperature;
float BL0940::update_temp_(sensor::Sensor *sensor, ube16_t temperature) const {
auto tb = (float) (temperature.h << 8 | temperature.l);
float converted_temp = ((float) 170 / 448) * (tb / 2 - 32) - 45;
if (sensor != nullptr) {
if (sensor->has_state() && std::abs(converted_temp - sensor->get_state()) > max_temperature_diff_) {
@@ -201,40 +87,33 @@ float BL0940::update_temp_(sensor::Sensor *sensor, uint16_le_t temperature) cons
return converted_temp;
}
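The conversion above is a fixed affine mapping of the raw TPS word; a standalone sketch with one worked value (tps_raw_to_celsius is a hypothetical helper, not part of this change):
inline float tps_raw_to_celsius(float raw) { return (170.0f / 448.0f) * (raw / 2.0f - 32.0f) - 45.0f; }
// e.g. raw = 512:  512 / 2 - 32 = 224,  224 * 170 / 448 = 85,  85 - 45 = 40 °C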
void BL0940::received_package_(DataPacket *data) {
void BL0940::received_package_(const DataPacket *data) const {
// Bad header
if (data->frame_header != BL0940_PACKET_HEADER) {
ESP_LOGI(TAG, "Invalid data. Header mismatch: %d", data->frame_header);
return;
}
// cf_cnt is only 24 bits, so track overflows
uint32_t cf_cnt = (uint24_t) data->cf_cnt;
cf_cnt |= this->prev_cf_cnt_ & 0xff000000;
if (cf_cnt < this->prev_cf_cnt_) {
cf_cnt += 0x1000000;
}
this->prev_cf_cnt_ = cf_cnt;
float v_rms = (float) to_uint32_t(data->v_rms) / voltage_reference_;
float i_rms = (float) to_uint32_t(data->i_rms) / current_reference_;
float watt = (float) to_int32_t(data->watt) / power_reference_;
uint32_t cf_cnt = to_uint32_t(data->cf_cnt);
float total_energy_consumption = (float) cf_cnt / energy_reference_;
float v_rms = (uint24_t) data->v_rms / this->voltage_reference_cal_;
float i_rms = (uint24_t) data->i_rms / this->current_reference_cal_;
float watt = (int24_t) data->watt / this->power_reference_cal_;
float total_energy_consumption = cf_cnt / this->energy_reference_cal_;
float tps1 = update_temp_(internal_temperature_sensor_, data->tps1);
float tps2 = update_temp_(external_temperature_sensor_, data->tps2);
float tps1 = update_temp_(this->internal_temperature_sensor_, data->tps1);
float tps2 = update_temp_(this->external_temperature_sensor_, data->tps2);
if (this->voltage_sensor_ != nullptr) {
this->voltage_sensor_->publish_state(v_rms);
if (voltage_sensor_ != nullptr) {
voltage_sensor_->publish_state(v_rms);
}
if (this->current_sensor_ != nullptr) {
this->current_sensor_->publish_state(i_rms);
if (current_sensor_ != nullptr) {
current_sensor_->publish_state(i_rms);
}
if (this->power_sensor_ != nullptr) {
this->power_sensor_->publish_state(watt);
if (power_sensor_ != nullptr) {
power_sensor_->publish_state(watt);
}
if (this->energy_sensor_ != nullptr) {
this->energy_sensor_->publish_state(total_energy_consumption);
if (energy_sensor_ != nullptr) {
energy_sensor_->publish_state(total_energy_consumption);
}
ESP_LOGV(TAG, "BL0940: U %fV, I %fA, P %fW, Cnt %" PRId32 ", ∫P %fkWh, T1 %f°C, T2 %f°C", v_rms, i_rms, watt, cf_cnt,
@@ -242,27 +121,7 @@ void BL0940::received_package_(DataPacket *data) {
}
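The cf_cnt handling above widens the chip's 24-bit pulse counter into a 32-bit software counter; the same idea as a standalone sketch (extend_counter_24 is a hypothetical helper, not part of this change):
static uint32_t extend_counter_24(uint32_t raw24, uint32_t &prev) {
  uint32_t value = (raw24 & 0x00FFFFFF) | (prev & 0xFF000000);  // keep the software-tracked high byte
  if (value < prev)      // the low 24 bits wrapped since the previous reading
    value += 0x1000000;  // advance by one 16777216-count epoch
  prev = value;
  return value;
}
// e.g. prev = 0x00FFFFF0 and a new raw reading of 0x000010 yields 0x01000010.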
void BL0940::dump_config() { // NOLINT(readability-function-cognitive-complexity)
ESP_LOGCONFIG(TAG,
"BL0940:\n"
" LEGACY MODE: %s\n"
" READ CMD: 0x%02X\n"
" WRITE CMD: 0x%02X\n"
" ------------------\n"
" Current reference: %f\n"
" Energy reference: %f\n"
" Power reference: %f\n"
" Voltage reference: %f\n",
TRUEFALSE(this->legacy_mode_enabled_), this->read_command_, this->write_command_,
this->current_reference_, this->energy_reference_, this->power_reference_, this->voltage_reference_);
#ifdef USE_NUMBER
ESP_LOGCONFIG(TAG,
"BL0940:\n"
" Current calibration: %f\n"
" Energy calibration: %f\n"
" Power calibration: %f\n"
" Voltage calibration: %f\n",
this->current_cal_, this->energy_cal_, this->power_cal_, this->voltage_cal_);
#endif
ESP_LOGCONFIG(TAG, "BL0940:");
LOG_SENSOR("", "Voltage", this->voltage_sensor_);
LOG_SENSOR("", "Current", this->current_sensor_);
LOG_SENSOR("", "Power", this->power_sensor_);
@@ -271,5 +130,9 @@ void BL0940::dump_config() { // NOLINT(readability-function-cognitive-complexit
LOG_SENSOR("", "External temperature", this->external_temperature_sensor_);
}
uint32_t BL0940::to_uint32_t(ube24_t input) { return input.h << 16 | input.m << 8 | input.l; }
int32_t BL0940::to_int32_t(sbe24_t input) { return input.h << 16 | input.m << 8 | input.l; }
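For clarity, standalone equivalents of the two helpers above with sample values (hypothetical free functions, shown only to illustrate byte order and sign handling):
inline uint32_t unpack_u24(ube24_t in) { return in.h << 16 | in.m << 8 | in.l; }
inline int32_t unpack_s24(sbe24_t in) { return in.h << 16 | in.m << 8 | in.l; }
// unpack_u24({0x10, 0x00, 0x01}) == 0x010010 (65552): the low byte is transmitted first
// unpack_s24({0xFF, 0xFF, -1}) == -1: the int8_t high byte sign-extends through the shift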
} // namespace bl0940
} // namespace esphome

View File

@@ -1,48 +1,66 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/core/datatypes.h"
#include "esphome/core/defines.h"
#ifdef USE_BUTTON
#include "esphome/components/button/button.h"
#endif
#ifdef USE_NUMBER
#include "esphome/components/number/number.h"
#endif
#include "esphome/components/sensor/sensor.h"
#include "esphome/components/uart/uart.h"
#include "esphome/components/sensor/sensor.h"
namespace esphome {
namespace bl0940 {
static const float BL0940_PREF = 1430;
static const float BL0940_UREF = 33000;
static const float BL0940_IREF = 275000;  // 2750 from Tasmota. Seems to generate values 100 times too high
// Measured at 297 J per click, based on power consumption over 5 minutes
// Converted to kWh (3.6 MJ per kWh). Used to be 256 * 1638.4
static const float BL0940_EREF = 3.6e6 / 297;
struct ube24_t { // NOLINT(readability-identifier-naming,altera-struct-pack-align)
uint8_t l;
uint8_t m;
uint8_t h;
} __attribute__((packed));
struct ube16_t { // NOLINT(readability-identifier-naming,altera-struct-pack-align)
uint8_t l;
uint8_t h;
} __attribute__((packed));
struct sbe24_t { // NOLINT(readability-identifier-naming,altera-struct-pack-align)
uint8_t l;
uint8_t m;
int8_t h;
} __attribute__((packed));
// Caveat: the multi-byte values are transmitted least-significant byte first (low - middle - high)
struct DataPacket {
uint8_t frame_header; // Packet header (0x58 in EN docs, 0x55 in CN docs and Tasmota tests)
uint24_le_t i_fast_rms; // Fast RMS current
uint24_le_t i_rms; // RMS current
uint24_t RESERVED0; // Reserved
uint24_le_t v_rms; // RMS voltage
uint24_t RESERVED1; // Reserved
int24_le_t watt; // Active power (can be negative for bidirectional measurement)
uint24_t RESERVED2; // Reserved
uint24_le_t cf_cnt; // Energy pulse count
uint24_t RESERVED3; // Reserved
uint16_le_t tps1; // Internal temperature sensor 1
uint8_t RESERVED4; // Reserved (should be 0x00)
uint16_le_t tps2; // Internal temperature sensor 2
uint8_t RESERVED5; // Reserved (should be 0x00)
uint8_t checksum; // Packet checksum
union DataPacket { // NOLINT(altera-struct-pack-align)
uint8_t raw[35];
struct {
uint8_t frame_header; // value of 0x58 according to docs. 0x55 according to Tasmota real world tests. Reality wins.
ube24_t i_fast_rms; // 0x00
ube24_t i_rms; // 0x04
ube24_t RESERVED0; // reserved
ube24_t v_rms; // 0x06
ube24_t RESERVED1; // reserved
sbe24_t watt; // 0x08
ube24_t RESERVED2; // reserved
ube24_t cf_cnt; // 0x0A
ube24_t RESERVED3; // reserved
ube16_t tps1; // 0x0c
uint8_t RESERVED4; // value of 0x00
ube16_t tps2; // 0x0c
uint8_t RESERVED5; // value of 0x00
uint8_t checksum; // checksum
};
} __attribute__((packed));
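Because the checksum loop walks raw while the parser reads the named fields, both views of the union must stay exactly 35 bytes; a compile-time guard one could add here (not part of this change):
static_assert(sizeof(DataPacket) == 35, "BL0940 frame must match the 35-byte raw[] overlay");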
class BL0940 : public PollingComponent, public uart::UARTDevice {
public:
// Sensor setters
void set_voltage_sensor(sensor::Sensor *voltage_sensor) { voltage_sensor_ = voltage_sensor; }
void set_current_sensor(sensor::Sensor *current_sensor) { current_sensor_ = current_sensor; }
void set_power_sensor(sensor::Sensor *power_sensor) { power_sensor_ = power_sensor; }
void set_energy_sensor(sensor::Sensor *energy_sensor) { energy_sensor_ = energy_sensor; }
// Temperature sensor setters
void set_internal_temperature_sensor(sensor::Sensor *internal_temperature_sensor) {
internal_temperature_sensor_ = internal_temperature_sensor;
}
@@ -50,105 +68,42 @@ class BL0940 : public PollingComponent, public uart::UARTDevice {
external_temperature_sensor_ = external_temperature_sensor;
}
// Configuration setters
void set_legacy_mode(bool enable) { this->legacy_mode_enabled_ = enable; }
void set_read_command(uint8_t read_command) { this->read_command_ = read_command; }
void set_write_command(uint8_t write_command) { this->write_command_ = write_command; }
// Reference value setters (used for calibration and conversion)
void set_current_reference(float current_ref) { this->current_reference_ = current_ref; }
void set_energy_reference(float energy_ref) { this->energy_reference_ = energy_ref; }
void set_power_reference(float power_ref) { this->power_reference_ = power_ref; }
void set_voltage_reference(float voltage_ref) { this->voltage_reference_ = voltage_ref; }
#ifdef USE_NUMBER
// Calibration number setters (for Home Assistant number entities)
void set_current_calibration_number(number::Number *num) { this->current_calibration_number_ = num; }
void set_voltage_calibration_number(number::Number *num) { this->voltage_calibration_number_ = num; }
void set_power_calibration_number(number::Number *num) { this->power_calibration_number_ = num; }
void set_energy_calibration_number(number::Number *num) { this->energy_calibration_number_ = num; }
#endif
#ifdef USE_BUTTON
// Resets all calibration values to defaults (can be triggered by a button)
void reset_calibration();
#endif
// Core component methods
void loop() override;
void update() override;
void setup() override;
void dump_config() override;
protected:
// --- Sensor pointers ---
sensor::Sensor *voltage_sensor_{nullptr}; // Voltage sensor
sensor::Sensor *current_sensor_{nullptr}; // Current sensor
sensor::Sensor *power_sensor_{nullptr}; // Power sensor (can be negative for bidirectional)
sensor::Sensor *energy_sensor_{nullptr}; // Energy sensor
sensor::Sensor *internal_temperature_sensor_{nullptr}; // Internal temperature sensor
sensor::Sensor *external_temperature_sensor_{nullptr}; // External temperature sensor
sensor::Sensor *voltage_sensor_{nullptr};
sensor::Sensor *current_sensor_{nullptr};
// NB: this may be negative, as the circuit is seemingly able to measure
// power in both directions
sensor::Sensor *power_sensor_{nullptr};
sensor::Sensor *energy_sensor_{nullptr};
sensor::Sensor *internal_temperature_sensor_{nullptr};
sensor::Sensor *external_temperature_sensor_{nullptr};
#ifdef USE_NUMBER
// --- Calibration number entities (for dynamic calibration via HA UI) ---
number::Number *voltage_calibration_number_{nullptr};
number::Number *current_calibration_number_{nullptr};
number::Number *power_calibration_number_{nullptr};
number::Number *energy_calibration_number_{nullptr};
#endif
// Max difference between two measurements of the temperature. Used to avoid noise.
float max_temperature_diff_{0};
// Divide by this to turn into Watt
float power_reference_ = BL0940_PREF;
// Divide by this to turn into Volt
float voltage_reference_ = BL0940_UREF;
// Divide by this to turn into Ampere
float current_reference_ = BL0940_IREF;
// Divide by this to turn into kWh
float energy_reference_ = BL0940_EREF;
// --- Internal state ---
uint32_t prev_cf_cnt_ = 0; // Previous energy pulse count (for energy calculation)
float max_temperature_diff_{0}; // Max allowed temperature difference between two measurements (noise filter)
float update_temp_(sensor::Sensor *sensor, ube16_t packed_temperature) const;
// --- Reference values for conversion ---
float power_reference_; // Divider for raw power to get Watts
float power_reference_cal_; // Calibrated power reference
float voltage_reference_; // Divider for raw voltage to get Volts
float voltage_reference_cal_; // Calibrated voltage reference
float current_reference_; // Divider for raw current to get Amperes
float current_reference_cal_; // Calibrated current reference
float energy_reference_; // Divider for raw energy to get kWh
float energy_reference_cal_; // Calibrated energy reference
static uint32_t to_uint32_t(ube24_t input);
// --- Home Assistant calibration values (multipliers, default 1) ---
float current_cal_{1};
float voltage_cal_{1};
float power_cal_{1};
float energy_cal_{1};
static int32_t to_int32_t(sbe24_t input);
// --- Protocol commands ---
uint8_t read_command_;
uint8_t write_command_;
static bool validate_checksum(const DataPacket *data);
// --- Mode flags ---
bool legacy_mode_enabled_ = true;
// --- Methods ---
// Converts packed temperature value to float and updates the sensor
float update_temp_(sensor::Sensor *sensor, uint16_le_t packed_temperature) const;
// Validates the checksum of a received data packet
bool validate_checksum_(DataPacket *data);
// Handles a received data packet
void received_package_(DataPacket *data);
// Calculates reference values for calibration and conversion
float calculate_energy_reference_();
float calculate_power_reference_();
float calculate_calibration_value_(float state);
// Calibration update callbacks (used with number entities)
void current_calibration_callback_(float state);
void voltage_calibration_callback_(float state);
void power_calibration_callback_(float state);
void energy_calibration_callback_(float state);
void reset_calibration_callback_();
// Recalculates all reference values after calibration changes
void recalibrate_();
void received_package_(const DataPacket *data) const;
};
} // namespace bl0940
} // namespace esphome

View File

@@ -1,27 +0,0 @@
import esphome.codegen as cg
from esphome.components import button
import esphome.config_validation as cv
from esphome.const import ENTITY_CATEGORY_CONFIG, ICON_RESTART
from .. import CONF_BL0940_ID, bl0940_ns
from ..sensor import BL0940
CalibrationResetButton = bl0940_ns.class_(
"CalibrationResetButton", button.Button, cg.Component
)
CONFIG_SCHEMA = cv.All(
button.button_schema(
CalibrationResetButton,
entity_category=ENTITY_CATEGORY_CONFIG,
icon=ICON_RESTART,
)
.extend({cv.GenerateID(CONF_BL0940_ID): cv.use_id(BL0940)})
.extend(cv.COMPONENT_SCHEMA)
)
async def to_code(config):
var = await button.new_button(config)
await cg.register_component(var, config)
await cg.register_parented(var, config[CONF_BL0940_ID])

View File

@@ -1,20 +0,0 @@
#include "calibration_reset_button.h"
#include "../bl0940.h"
#include "esphome/core/hal.h"
#include "esphome/core/log.h"
#include "esphome/core/application.h"
namespace esphome {
namespace bl0940 {
static const char *const TAG = "bl0940.button.calibration_reset";
void CalibrationResetButton::dump_config() { LOG_BUTTON("", "Calibration Reset Button", this); }
void CalibrationResetButton::press_action() {
ESP_LOGI(TAG, "Resetting calibration defaults...");
this->parent_->reset_calibration();
}
} // namespace bl0940
} // namespace esphome

View File

@@ -1,19 +0,0 @@
#pragma once
#include "esphome/core/component.h"
#include "esphome/components/button/button.h"
namespace esphome {
namespace bl0940 {
class BL0940; // Forward declaration of BL0940 class
class CalibrationResetButton : public button::Button, public Component, public Parented<BL0940> {
public:
void dump_config() override;
void press_action() override;
};
} // namespace bl0940
} // namespace esphome

View File

@@ -1,94 +0,0 @@
import esphome.codegen as cg
from esphome.components import number
import esphome.config_validation as cv
from esphome.const import (
CONF_MAX_VALUE,
CONF_MIN_VALUE,
CONF_MODE,
CONF_RESTORE_VALUE,
CONF_STEP,
ENTITY_CATEGORY_CONFIG,
UNIT_PERCENT,
)
from .. import CONF_BL0940_ID, bl0940_ns
from ..sensor import BL0940
# Define calibration types
CONF_CURRENT_CALIBRATION = "current_calibration"
CONF_VOLTAGE_CALIBRATION = "voltage_calibration"
CONF_POWER_CALIBRATION = "power_calibration"
CONF_ENERGY_CALIBRATION = "energy_calibration"
BL0940Number = bl0940_ns.class_("BL0940Number")
CalibrationNumber = bl0940_ns.class_(
"CalibrationNumber", number.Number, cg.PollingComponent
)
def validate_min_max(config):
if config[CONF_MAX_VALUE] <= config[CONF_MIN_VALUE]:
raise cv.Invalid("max_value must be greater than min_value")
return config
CALIBRATION_SCHEMA = cv.All(
number.number_schema(
CalibrationNumber,
entity_category=ENTITY_CATEGORY_CONFIG,
unit_of_measurement=UNIT_PERCENT,
)
.extend(
{
cv.Optional(CONF_MODE, default="BOX"): cv.enum(number.NUMBER_MODES),
cv.Optional(CONF_MAX_VALUE, default=10): cv.All(
cv.float_, cv.Range(max=50)
),
cv.Optional(CONF_MIN_VALUE, default=-10): cv.All(
cv.float_, cv.Range(min=-50)
),
cv.Optional(CONF_STEP, default=0.1): cv.positive_float,
cv.Optional(CONF_RESTORE_VALUE): cv.boolean,
}
)
.extend(cv.COMPONENT_SCHEMA),
validate_min_max,
)
# Configuration schema for BL0940 numbers
CONFIG_SCHEMA = cv.Schema(
{
cv.GenerateID(): cv.declare_id(BL0940Number),
cv.GenerateID(CONF_BL0940_ID): cv.use_id(BL0940),
cv.Optional(CONF_CURRENT_CALIBRATION): CALIBRATION_SCHEMA,
cv.Optional(CONF_VOLTAGE_CALIBRATION): CALIBRATION_SCHEMA,
cv.Optional(CONF_POWER_CALIBRATION): CALIBRATION_SCHEMA,
cv.Optional(CONF_ENERGY_CALIBRATION): CALIBRATION_SCHEMA,
}
)
async def to_code(config):
# Get the BL0940 component instance
bl0940 = await cg.get_variable(config[CONF_BL0940_ID])
# Process all calibration types
for cal_type, setter_method in [
(CONF_CURRENT_CALIBRATION, "set_current_calibration_number"),
(CONF_VOLTAGE_CALIBRATION, "set_voltage_calibration_number"),
(CONF_POWER_CALIBRATION, "set_power_calibration_number"),
(CONF_ENERGY_CALIBRATION, "set_energy_calibration_number"),
]:
if conf := config.get(cal_type):
var = await number.new_number(
conf,
min_value=conf.get(CONF_MIN_VALUE),
max_value=conf.get(CONF_MAX_VALUE),
step=conf.get(CONF_STEP),
)
await cg.register_component(var, conf)
if restore_value := config.get(CONF_RESTORE_VALUE):
cg.add(var.set_restore_value(restore_value))
cg.add(getattr(bl0940, setter_method)(var))

View File

@@ -1,29 +0,0 @@
#include "calibration_number.h"
#include "esphome/core/log.h"
namespace esphome {
namespace bl0940 {
static const char *const TAG = "bl0940.number";
void CalibrationNumber::setup() {
float value = 0.0f;
if (this->restore_value_) {
this->pref_ = global_preferences->make_preference<float>(this->get_object_id_hash());
if (!this->pref_.load(&value)) {
value = 0.0f;
}
}
this->publish_state(value);
}
void CalibrationNumber::control(float value) {
this->publish_state(value);
if (this->restore_value_)
this->pref_.save(&value);
}
void CalibrationNumber::dump_config() { LOG_NUMBER("", "Calibration Number", this); }
} // namespace bl0940
} // namespace esphome

View File

@@ -1,26 +0,0 @@
#pragma once
#include "esphome/components/number/number.h"
#include "esphome/core/component.h"
#include "esphome/core/preferences.h"
namespace esphome {
namespace bl0940 {
class CalibrationNumber : public number::Number, public Component {
public:
void setup() override;
void dump_config() override;
float get_setup_priority() const override { return setup_priority::HARDWARE; }
void set_restore_value(bool restore_value) { this->restore_value_ = restore_value; }
protected:
void control(float value) override;
bool restore_value_{true};
ESPPreferenceObject pref_;
};
} // namespace bl0940
} // namespace esphome

Some files were not shown because too many files have changed in this diff.