Mirror of https://github.com/esphome/esphome.git (synced 2025-10-13 05:38:42 +00:00)

Compare commits: 58 commits, memory_api...loop_fix_v

Commits (SHA1):
4c00861760
2ff3e7fb2b
b0c20d7adb
2cc5e24b38
3afa73b449
dcf2697a2a
6a11700a6b
9bd9b043c8
cb602c9b1a
b54beb357a
6abc2efd96
be51093a7e
52219c4dcc
590cae13c0
e15429b0f5
b5cc668a45
a1b0ae78e0
fcc8a809e6
48474c0f8c
9f9c95dd09
a74fcbc8b6
c8b898f9c5
81bf2688b4
87d2c9868f
5ca407e27c
5bbc2ab482
309e8b4c92
eee2987c99
061e55f8c5
9ad462d8c6
56334b7832
a4b7e0c700
84ad7ee0e4
d006008539
f1af9d978c
6bb1e4c9c0
785df05631
82bdb08884
b709ff84c3
93266ad08f
2fac813f18
a62c7a03dd
ec63247ae0
0fe6e7169c
a0f4de1bfb
a541549d23
b74715fe14
5aff20a624
7682b4e9a3
6eabf709c6
6209d4b493
f10c361454
27456c1370
1aeefbe547
3f3bce7ef4
0acc58d5a1
0b4ef0fea2
a067bdb769
@@ -186,6 +186,11 @@ This document provides essential context for AI models interacting with this pro
 └── components/[component]/  # Component-specific tests
 ```
 Run them using `script/test_build_components`. Use `-c <component>` to test specific components and `-t <target>` for specific platforms.
+* **Testing All Components Together:** To verify that all components can be tested together without ID conflicts or configuration issues, use:
+  ```bash
+  ./script/test_component_grouping.py -e config --all
+  ```
+  This tests all components in a single build to catch conflicts that might not appear when testing components individually. Use `-e config` for fast configuration validation, or `-e compile` for full compilation testing.
 * **Debugging and Troubleshooting:**
   * **Debug Tools:**
     - `esphome config <file>.yaml` to validate configuration.
@@ -1 +1 @@
-ab49c22900dd39c004623e450a1076b111d6741f31967a637ab6e0e3dd2e753e
+049d60eed541730efaa4c0dc5d337b4287bf29b6daa350b5dfc1f23915f1c52f
.github/workflows/ci.yml (vendored): 100 changed lines
@@ -177,6 +177,7 @@ jobs:
       clang-tidy: ${{ steps.determine.outputs.clang-tidy }}
       python-linters: ${{ steps.determine.outputs.python-linters }}
       changed-components: ${{ steps.determine.outputs.changed-components }}
+      changed-components-with-tests: ${{ steps.determine.outputs.changed-components-with-tests }}
       component-test-count: ${{ steps.determine.outputs.component-test-count }}
     steps:
       - name: Check out code from GitHub
@@ -204,6 +205,7 @@ jobs:
           echo "clang-tidy=$(echo "$output" | jq -r '.clang_tidy')" >> $GITHUB_OUTPUT
           echo "python-linters=$(echo "$output" | jq -r '.python_linters')" >> $GITHUB_OUTPUT
           echo "changed-components=$(echo "$output" | jq -c '.changed_components')" >> $GITHUB_OUTPUT
+          echo "changed-components-with-tests=$(echo "$output" | jq -c '.changed_components_with_tests')" >> $GITHUB_OUTPUT
           echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT

   integration-tests:
@@ -367,12 +369,13 @@ jobs:
     strategy:
       fail-fast: false
       max-parallel: 2
       matrix:
-        file: ${{ fromJson(needs.determine-jobs.outputs.changed-components) }}
+        file: ${{ fromJson(needs.determine-jobs.outputs.changed-components-with-tests) }}
     steps:
-      - name: Install dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsdl2-dev
+      - name: Cache apt packages
+        uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
+        with:
+          packages: libsdl2-dev
+          version: 1.0

       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -381,17 +384,17 @@ jobs:
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           cache-key: ${{ needs.common.outputs.cache-key }}
-      - name: test_build_components -e config -c ${{ matrix.file }}
+      - name: Validate config for ${{ matrix.file }}
         run: |
           . venv/bin/activate
-          ./script/test_build_components -e config -c ${{ matrix.file }}
+          python3 script/test_build_components.py -e config -c ${{ matrix.file }}
-      - name: test_build_components -e compile -c ${{ matrix.file }}
+      - name: Compile config for ${{ matrix.file }}
         run: |
           . venv/bin/activate
-          ./script/test_build_components -e compile -c ${{ matrix.file }}
+          python3 script/test_build_components.py -e compile -c ${{ matrix.file }}

   test-build-components-splitter:
-    name: Split components for testing into 20 groups maximum
+    name: Split components for intelligent grouping (40 weighted per batch)
     runs-on: ubuntu-24.04
     needs:
       - common
@@ -402,14 +405,26 @@ jobs:
     steps:
       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
-      - name: Split components into 20 groups
+      - name: Restore Python
+        uses: ./.github/actions/restore-python
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON }}
+          cache-key: ${{ needs.common.outputs.cache-key }}
+      - name: Split components intelligently based on bus configurations
         id: split
         run: |
-          components=$(echo '${{ needs.determine-jobs.outputs.changed-components }}' | jq -c '.[]' | shuf | jq -s -c '[_nwise(20) | join(" ")]')
-          echo "components=$components" >> $GITHUB_OUTPUT
+          . venv/bin/activate
+
+          # Use intelligent splitter that groups components with same bus configs
+          components='${{ needs.determine-jobs.outputs.changed-components-with-tests }}'
+
+          echo "Splitting components intelligently..."
+          output=$(python3 script/split_components_for_ci.py --components "$components" --batch-size 40 --output github)
+
+          echo "$output" >> $GITHUB_OUTPUT
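Note: `script/split_components_for_ci.py` itself is not part of this compare view, so the sketch below only illustrates the general idea of weighted batching; the function name, weight table, and the omitted bus-config grouping are hypothetical, not the script's real interface.

```python
# Hypothetical sketch of weighted batching (not repository code); the real
# splitter also groups components that share bus configurations.
import json


def split_weighted(components: list[str], weights: dict[str, int], batch_size: int = 40) -> list[str]:
    """Greedily pack components into batches whose summed weight stays within batch_size."""
    batches: list[list[str]] = []
    current: list[str] = []
    current_weight = 0
    for name in sorted(components, key=lambda c: -weights.get(c, 1)):
        weight = weights.get(name, 1)
        if current and current_weight + weight > batch_size:
            batches.append(current)
            current, current_weight = [], 0
        current.append(name)
        current_weight += weight
    if current:
        batches.append(current)
    # Same shape the workflow matrix consumes: a JSON list of space-joined groups
    return [" ".join(batch) for batch in batches]


print(json.dumps(split_weighted(["wifi", "api", "i2c", "uart"], {"api": 10})))
```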
   test-build-components-split:
-    name: Test split components
+    name: Test components batch (${{ matrix.components }})
     runs-on: ubuntu-24.04
     needs:
       - common
@@ -418,17 +433,23 @@ jobs:
     if: github.event_name == 'pull_request' && fromJSON(needs.determine-jobs.outputs.component-test-count) >= 100
     strategy:
       fail-fast: false
-      max-parallel: 4
+      max-parallel: 5
       matrix:
         components: ${{ fromJson(needs.test-build-components-splitter.outputs.matrix) }}
     steps:
+      - name: Show disk space
+        run: |
+          echo "Available disk space:"
+          df -h

       - name: List components
         run: echo ${{ matrix.components }}

-      - name: Install dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install libsdl2-dev
+      - name: Cache apt packages
+        uses: awalsh128/cache-apt-pkgs-action@acb598e5ddbc6f68a970c5da0688d2f3a9f04d05 # v1.5.3
+        with:
+          packages: libsdl2-dev
+          version: 1.0

       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -437,20 +458,37 @@ jobs:
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           cache-key: ${{ needs.common.outputs.cache-key }}
-      - name: Validate config
+      - name: Validate and compile components with intelligent grouping
         run: |
           . venv/bin/activate
-          for component in ${{ matrix.components }}; do
-            ./script/test_build_components -e config -c $component
-          done
-      - name: Compile config
-        run: |
-          . venv/bin/activate
-          mkdir build_cache
-          export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
-          for component in ${{ matrix.components }}; do
-            ./script/test_build_components -e compile -c $component
-          done
+          # Use /mnt for build files (70GB available vs ~29GB on /)
+          # Bind mount PlatformIO directory to /mnt (tools, packages, build cache all go there)
+          sudo mkdir -p /mnt/platformio
+          sudo chown $USER:$USER /mnt/platformio
+          mkdir -p ~/.platformio
+          sudo mount --bind /mnt/platformio ~/.platformio
+
+          # Bind mount test build directory to /mnt
+          sudo mkdir -p /mnt/test_build_components_build
+          sudo chown $USER:$USER /mnt/test_build_components_build
+          mkdir -p tests/test_build_components/build
+          sudo mount --bind /mnt/test_build_components_build tests/test_build_components/build
+
+          # Convert space-separated components to comma-separated for Python script
+          components_csv=$(echo "${{ matrix.components }}" | tr ' ' ',')
+
+          echo "Testing components: $components_csv"
+          echo ""
+
+          # Run config validation with grouping
+          python3 script/test_build_components.py -e config -c "$components_csv" -f
+
+          echo ""
+          echo "Config validation passed! Starting compilation..."
+          echo ""
+
+          # Run compilation with grouping
+          python3 script/test_build_components.py -e compile -c "$components_csv" -f

   pre-commit-ci-lite:
     name: pre-commit.ci lite
.github/workflows/codeql.yml (vendored): 4 changed lines
@@ -58,7 +58,7 @@ jobs:

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
+        uses: github/codeql-action/init@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
         with:
           languages: ${{ matrix.language }}
           build-mode: ${{ matrix.build-mode }}
@@ -86,6 +86,6 @@ jobs:
           exit 1

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
+        uses: github/codeql-action/analyze@f443b600d91635bebf5b0d9ebc620189c0d6fba5 # v4.30.8
         with:
           category: "/language:${{matrix.language}}"
.github/workflows/stale.yml (vendored): 2 changed lines
@@ -23,7 +23,7 @@ jobs:
         with:
           debug-only: ${{ github.ref != 'refs/heads/dev' }} # Dry-run when not run on dev branch
           remove-stale-when-updated: true
-          operations-per-run: 150
+          operations-per-run: 400

           # The 90 day stale policy for PRs
           # - PRs
@@ -11,7 +11,7 @@ ci:
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.13.3
+    rev: v0.14.0
     hooks:
       # Run the linter.
       - id: ruff
@@ -139,6 +139,7 @@ esphome/components/ens160_base/* @latonita @vincentscode
 esphome/components/ens160_i2c/* @latonita
 esphome/components/ens160_spi/* @latonita
 esphome/components/ens210/* @itn3rd77
+esphome/components/epaper_spi/* @esphome/core
 esphome/components/es7210/* @kahrendt
 esphome/components/es7243e/* @kbx81
 esphome/components/es8156/* @kbx81
@@ -429,6 +430,7 @@ esphome/components/speaker/media_player/* @kahrendt @synesthesiam
 esphome/components/spi/* @clydebarrow @esphome/core
 esphome/components/spi_device/* @clydebarrow
 esphome/components/spi_led_strip/* @clydebarrow
+esphome/components/split_buffer/* @jesserockz
 esphome/components/sprinkler/* @kbx81
 esphome/components/sps30/* @martgras
 esphome/components/ssd1322_base/* @kbx81
Doxyfile: 2 changed lines
@@ -48,7 +48,7 @@ PROJECT_NAME = ESPHome
# could be handy for archiving the generated documentation or if some version
# control system is used.

-PROJECT_NUMBER = 2025.10.0-dev
+PROJECT_NUMBER = 2025.11.0-dev

# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -640,13 +640,6 @@ def command_vscode(args: ArgsProtocol) -> int | None:


 def command_compile(args: ArgsProtocol, config: ConfigType) -> int | None:
-    # Set memory analysis options in config
-    if args.analyze_memory:
-        config.setdefault(CONF_ESPHOME, {})["analyze_memory"] = True
-
-    if args.memory_report:
-        config.setdefault(CONF_ESPHOME, {})["memory_report_file"] = args.memory_report
-
     exit_code = write_cpp(config)
     if exit_code != 0:
         return exit_code
@@ -1009,6 +1002,12 @@ def parse_args(argv):
         action="append",
         default=[],
     )
+    options_parser.add_argument(
+        "--testing-mode",
+        help="Enable testing mode (disables validation checks for grouped component testing)",
+        action="store_true",
+        default=False,
+    )

     parser = argparse.ArgumentParser(
         description=f"ESPHome {const.__version__}", parents=[options_parser]
@@ -1053,17 +1052,6 @@ def parse_args(argv):
         help="Only generate source code, do not compile.",
         action="store_true",
     )
-    parser_compile.add_argument(
-        "--analyze-memory",
-        help="Analyze and display memory usage by component after compilation.",
-        action="store_true",
-    )
-    parser_compile.add_argument(
-        "--memory-report",
-        help="Save memory analysis report to a file (supports .json or .txt).",
-        type=str,
-        metavar="FILE",
-    )

     parser_upload = subparsers.add_parser(
         "upload",
@@ -1278,6 +1266,7 @@ def run_esphome(argv):

     args = parse_args(argv)
     CORE.dashboard = args.dashboard
+    CORE.testing_mode = args.testing_mode

     # Create address cache from command-line arguments
     CORE.address_cache = AddressCache.from_cli_args(
(File diff suppressed because it is too large.)
@@ -19,13 +19,14 @@ namespace esphome::api {
 //#define HELPER_LOG_PACKETS

 // Maximum message size limits to prevent OOM on constrained devices
-// Voice Assistant is our largest user at 1024 bytes per audio chunk
-// Using 2048 + 256 bytes overhead = 2304 bytes total to support voice and future needs
-// ESP8266 has very limited RAM and cannot support voice assistant
+// Handshake messages are limited to a small size for security
+static constexpr uint16_t MAX_HANDSHAKE_SIZE = 128;
+
+// Data message limits vary by platform based on available memory
 #ifdef USE_ESP8266
-static constexpr uint16_t MAX_MESSAGE_SIZE = 512;  // Keep small for memory constrained ESP8266
+static constexpr uint16_t MAX_MESSAGE_SIZE = 8192;  // 8 KiB for ESP8266
 #else
-static constexpr uint16_t MAX_MESSAGE_SIZE = 2304;  // Support voice (1024) + headroom for larger messages
+static constexpr uint16_t MAX_MESSAGE_SIZE = 32768;  // 32 KiB for ESP32 and other platforms
 #endif

 // Forward declaration
@@ -133,9 +133,6 @@ APIError APINoiseFrameHelper::loop() {
 }

 /** Read a packet into the rx_buf_.
  *
- * On success, rx_buf_ contains the frame data and state variables are cleared for the next read.
- * Caller is responsible for consuming rx_buf_ (e.g., via std::move).
- *
  * @return APIError::OK if a full packet is in rx_buf_
  *
@@ -145,11 +142,6 @@ APIError APINoiseFrameHelper::loop() {
  * errno API_ERROR_HANDSHAKE_PACKET_LEN: Packet too big for this phase.
  */
 APIError APINoiseFrameHelper::try_read_frame_() {
-  // Clear buffer when starting a new frame (rx_buf_len_ == 0 means not resuming after WOULD_BLOCK)
-  if (this->rx_buf_len_ == 0) {
-    this->rx_buf_.clear();
-  }
-
   // read header
   if (rx_header_buf_len_ < 3) {
     // no header information yet
@@ -176,18 +168,12 @@ APIError APINoiseFrameHelper::try_read_frame_() {
   // read body
   uint16_t msg_size = (((uint16_t) rx_header_buf_[1]) << 8) | rx_header_buf_[2];

-  if (state_ != State::DATA && msg_size > 128) {
-    // for handshake message only permit up to 128 bytes
+  // Check against size limits to prevent OOM: MAX_HANDSHAKE_SIZE for handshake, MAX_MESSAGE_SIZE for data
+  uint16_t limit = (state_ == State::DATA) ? MAX_MESSAGE_SIZE : MAX_HANDSHAKE_SIZE;
+  if (msg_size > limit) {
     state_ = State::FAILED;
-    HELPER_LOG("Bad packet len for handshake: %d", msg_size);
-    return APIError::BAD_HANDSHAKE_PACKET_LEN;
-  }
-
-  // Check against maximum message size to prevent OOM
-  if (msg_size > MAX_MESSAGE_SIZE) {
-    state_ = State::FAILED;
-    HELPER_LOG("Bad packet: message size %u exceeds maximum %u", msg_size, MAX_MESSAGE_SIZE);
-    return APIError::BAD_DATA_PACKET;
+    HELPER_LOG("Bad packet: message size %u exceeds maximum %u", msg_size, limit);
+    return (state_ == State::DATA) ? APIError::BAD_DATA_PACKET : APIError::BAD_HANDSHAKE_PACKET_LEN;
   }

   // Reserve space for body
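For readers skimming the C++, the unified bounds check reduces to the following Python transliteration (illustrative only; constants mirror helper.h after this change):

```python
# Transliteration of the state-dependent limit check above (not repository code).
MAX_HANDSHAKE_SIZE = 128
MAX_MESSAGE_SIZE = 32768  # 8192 on ESP8266


def parse_frame_size(header: bytes, in_data_state: bool) -> int:
    """Parse the 16-bit big-endian body size and enforce the per-state limit."""
    msg_size = (header[1] << 8) | header[2]
    limit = MAX_MESSAGE_SIZE if in_data_state else MAX_HANDSHAKE_SIZE
    if msg_size > limit:
        raise ValueError(f"message size {msg_size} exceeds maximum {limit}")
    return msg_size


assert parse_frame_size(bytes([0x01, 0x04, 0x00]), in_data_state=True) == 1024
```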
@@ -48,20 +48,12 @@ APIError APIPlaintextFrameHelper::loop() {
 }

 /** Read a packet into the rx_buf_.
  *
- * On success, rx_buf_ contains the frame data and state variables are cleared for the next read.
- * Caller is responsible for consuming rx_buf_ (e.g., via std::move).
- *
  * @return See APIError
  *
  * error API_ERROR_BAD_INDICATOR: Bad indicator byte at start of frame.
  */
 APIError APIPlaintextFrameHelper::try_read_frame_() {
-  // Clear buffer when starting a new frame (rx_buf_len_ == 0 means not resuming after WOULD_BLOCK)
-  if (this->rx_buf_len_ == 0) {
-    this->rx_buf_.clear();
-  }
-
   // read header
   while (!rx_header_parsed_) {
     // Now that we know when the socket is ready, we can read up to 3 bytes
@@ -165,4 +165,4 @@ def final_validate_audio_schema(


 async def to_code(config):
-    cg.add_library("esphome/esp-audio-libs", "1.1.4")
+    cg.add_library("esphome/esp-audio-libs", "2.0.1")
@@ -229,18 +229,18 @@ FileDecoderState AudioDecoder::decode_flac_() {
     auto result = this->flac_decoder_->read_header(this->input_transfer_buffer_->get_buffer_start(),
                                                    this->input_transfer_buffer_->available());

-    if (result == esp_audio_libs::flac::FLAC_DECODER_HEADER_OUT_OF_DATA) {
-      return FileDecoderState::POTENTIALLY_FAILED;
-    }
-
-    if (result != esp_audio_libs::flac::FLAC_DECODER_SUCCESS) {
-      // Couldn't read FLAC header
+    if (result > esp_audio_libs::flac::FLAC_DECODER_HEADER_OUT_OF_DATA) {
+      // Serious error reading FLAC header, there is no recovery
       return FileDecoderState::FAILED;
     }

+    size_t bytes_consumed = this->flac_decoder_->get_bytes_index();
+    this->input_transfer_buffer_->decrease_buffer_length(bytes_consumed);
+
+    if (result == esp_audio_libs::flac::FLAC_DECODER_HEADER_OUT_OF_DATA) {
+      return FileDecoderState::MORE_TO_PROCESS;
+    }
+
     // Reallocate the output transfer buffer to the smallest necessary size
     this->free_buffer_required_ = flac_decoder_->get_output_buffer_size_bytes();
     if (!this->output_transfer_buffer_->reallocate(this->free_buffer_required_)) {
@@ -256,9 +256,9 @@ FileDecoderState AudioDecoder::decode_flac_() {
     }

     uint32_t output_samples = 0;
-    auto result = this->flac_decoder_->decode_frame(
-        this->input_transfer_buffer_->get_buffer_start(), this->input_transfer_buffer_->available(),
-        reinterpret_cast<int16_t *>(this->output_transfer_buffer_->get_buffer_end()), &output_samples);
+    auto result = this->flac_decoder_->decode_frame(this->input_transfer_buffer_->get_buffer_start(),
+                                                    this->input_transfer_buffer_->available(),
+                                                    this->output_transfer_buffer_->get_buffer_end(), &output_samples);

     if (result == esp_audio_libs::flac::FLAC_DECODER_ERROR_OUT_OF_DATA) {
       // Not an issue, just needs more data that we'll get next time.
@@ -105,9 +105,9 @@ class Canbus : public Component {
   CallbackManager<void(uint32_t can_id, bool extended_id, bool rtr, const std::vector<uint8_t> &data)>
       callback_manager_{};

-  virtual bool setup_internal();
-  virtual Error send_message(struct CanFrame *frame);
-  virtual Error read_message(struct CanFrame *frame);
+  virtual bool setup_internal() = 0;
+  virtual Error send_message(struct CanFrame *frame) = 0;
+  virtual Error read_message(struct CanFrame *frame) = 0;
 };

 template<typename... Ts> class CanbusSendAction : public Action<Ts...>, public Parented<Canbus> {
@@ -5,7 +5,7 @@ namespace dashboard_import {

 static std::string g_package_import_url;  // NOLINT

-std::string get_package_import_url() { return g_package_import_url; }
+const std::string &get_package_import_url() { return g_package_import_url; }
 void set_package_import_url(std::string url) { g_package_import_url = std::move(url); }

 }  // namespace dashboard_import
@@ -5,7 +5,7 @@
 namespace esphome {
 namespace dashboard_import {

-std::string get_package_import_url();
+const std::string &get_package_import_url();
 void set_package_import_url(std::string url);

 }  // namespace dashboard_import
esphome/components/epaper_spi/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
+CODEOWNERS = ["@esphome/core"]
esphome/components/epaper_spi/display.py (new file, 80 lines)
@@ -0,0 +1,80 @@
from esphome import core, pins
import esphome.codegen as cg
from esphome.components import display, spi
import esphome.config_validation as cv
from esphome.const import (
    CONF_BUSY_PIN,
    CONF_DC_PIN,
    CONF_ID,
    CONF_LAMBDA,
    CONF_MODEL,
    CONF_PAGES,
    CONF_RESET_DURATION,
    CONF_RESET_PIN,
)

AUTO_LOAD = ["split_buffer"]
DEPENDENCIES = ["spi"]

epaper_spi_ns = cg.esphome_ns.namespace("epaper_spi")
EPaperBase = epaper_spi_ns.class_(
    "EPaperBase", cg.PollingComponent, spi.SPIDevice, display.DisplayBuffer
)

EPaperSpectraE6 = epaper_spi_ns.class_("EPaperSpectraE6", EPaperBase)
EPaper7p3InSpectraE6 = epaper_spi_ns.class_("EPaper7p3InSpectraE6", EPaperSpectraE6)

MODELS = {
    "7.3in-spectra-e6": EPaper7p3InSpectraE6,
}


CONFIG_SCHEMA = cv.All(
    display.FULL_DISPLAY_SCHEMA.extend(
        {
            cv.GenerateID(): cv.declare_id(EPaperBase),
            cv.Required(CONF_DC_PIN): pins.gpio_output_pin_schema,
            cv.Required(CONF_MODEL): cv.one_of(*MODELS, lower=True, space="-"),
            cv.Optional(CONF_RESET_PIN): pins.gpio_output_pin_schema,
            cv.Optional(CONF_BUSY_PIN): pins.gpio_input_pin_schema,
            cv.Optional(CONF_RESET_DURATION): cv.All(
                cv.positive_time_period_milliseconds,
                cv.Range(max=core.TimePeriod(milliseconds=500)),
            ),
        }
    )
    .extend(cv.polling_component_schema("60s"))
    .extend(spi.spi_device_schema()),
    cv.has_at_most_one_key(CONF_PAGES, CONF_LAMBDA),
)

FINAL_VALIDATE_SCHEMA = spi.final_validate_device_schema(
    "epaper_spi", require_miso=False, require_mosi=True
)


async def to_code(config):
    model = MODELS[config[CONF_MODEL]]

    rhs = model.new()
    var = cg.Pvariable(config[CONF_ID], rhs, model)

    await display.register_display(var, config)
    await spi.register_spi_device(var, config)

    dc = await cg.gpio_pin_expression(config[CONF_DC_PIN])
    cg.add(var.set_dc_pin(dc))

    if CONF_LAMBDA in config:
        lambda_ = await cg.process_lambda(
            config[CONF_LAMBDA], [(display.DisplayRef, "it")], return_type=cg.void
        )
        cg.add(var.set_writer(lambda_))
    if CONF_RESET_PIN in config:
        reset = await cg.gpio_pin_expression(config[CONF_RESET_PIN])
        cg.add(var.set_reset_pin(reset))
    if CONF_BUSY_PIN in config:
        busy = await cg.gpio_pin_expression(config[CONF_BUSY_PIN])
        cg.add(var.set_busy_pin(busy))
    if CONF_RESET_DURATION in config:
        cg.add(var.set_reset_duration(config[CONF_RESET_DURATION]))
esphome/components/epaper_spi/epaper_spi.cpp (new file, 227 lines)
@@ -0,0 +1,227 @@
#include "epaper_spi.h"
#include <cinttypes>
#include "esphome/core/application.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

namespace esphome::epaper_spi {

static const char *const TAG = "epaper_spi";

static const LogString *epaper_state_to_string(EPaperState state) {
  switch (state) {
    case EPaperState::IDLE:
      return LOG_STR("IDLE");
    case EPaperState::UPDATE:
      return LOG_STR("UPDATE");
    case EPaperState::RESET:
      return LOG_STR("RESET");
    case EPaperState::INITIALISE:
      return LOG_STR("INITIALISE");
    case EPaperState::TRANSFER_DATA:
      return LOG_STR("TRANSFER_DATA");
    case EPaperState::POWER_ON:
      return LOG_STR("POWER_ON");
    case EPaperState::REFRESH_SCREEN:
      return LOG_STR("REFRESH_SCREEN");
    case EPaperState::POWER_OFF:
      return LOG_STR("POWER_OFF");
    case EPaperState::DEEP_SLEEP:
      return LOG_STR("DEEP_SLEEP");
    default:
      return LOG_STR("UNKNOWN");
  }
}

void EPaperBase::setup() {
  if (!this->init_buffer_(this->get_buffer_length())) {
    this->mark_failed("Failed to initialise buffer");
    return;
  }
  this->setup_pins_();
  this->spi_setup();
}

bool EPaperBase::init_buffer_(size_t buffer_length) {
  if (!this->buffer_.init(buffer_length)) {
    return false;
  }
  this->clear();
  return true;
}

void EPaperBase::setup_pins_() {
  this->dc_pin_->setup();  // OUTPUT
  this->dc_pin_->digital_write(false);

  if (this->reset_pin_ != nullptr) {
    this->reset_pin_->setup();  // OUTPUT
    this->reset_pin_->digital_write(true);
  }

  if (this->busy_pin_ != nullptr) {
    this->busy_pin_->setup();  // INPUT
  }
}

float EPaperBase::get_setup_priority() const { return setup_priority::PROCESSOR; }

void EPaperBase::command(uint8_t value) {
  this->start_command_();
  this->write_byte(value);
  this->end_command_();
}

void EPaperBase::data(uint8_t value) {
  this->start_data_();
  this->write_byte(value);
  this->end_data_();
}

// Write a command followed by zero or more bytes of data.
// The command is the first byte, length is the length of data only in the second byte, followed by the data.
// [COMMAND, LENGTH, DATA...]
void EPaperBase::cmd_data(const uint8_t *data) {
  const uint8_t command = data[0];
  const uint8_t length = data[1];
  const uint8_t *ptr = data + 2;

  ESP_LOGVV(TAG, "Command: 0x%02X, Length: %d, Data: %s", command, length,
            format_hex_pretty(ptr, length, '.', false).c_str());

  this->dc_pin_->digital_write(false);
  this->enable();
  this->write_byte(command);
  if (length > 0) {
    this->dc_pin_->digital_write(true);
    this->write_array(ptr, length);
  }
  this->disable();
}

bool EPaperBase::is_idle_() {
  if (this->busy_pin_ == nullptr) {
    return true;
  }
  return !this->busy_pin_->digital_read();
}

void EPaperBase::reset() {
  if (this->reset_pin_ != nullptr) {
    this->reset_pin_->digital_write(false);
    this->disable_loop();
    this->set_timeout(this->reset_duration_, [this] {
      this->reset_pin_->digital_write(true);
      this->set_timeout(20, [this] { this->enable_loop(); });
    });
  }
}

void EPaperBase::update() {
  if (!this->state_queue_.empty()) {
    ESP_LOGE(TAG, "Display update already in progress - %s",
             LOG_STR_ARG(epaper_state_to_string(this->state_queue_.front())));
    return;
  }

  this->state_queue_.push(EPaperState::UPDATE);
  this->state_queue_.push(EPaperState::RESET);
  this->state_queue_.push(EPaperState::INITIALISE);
  this->state_queue_.push(EPaperState::TRANSFER_DATA);
  this->state_queue_.push(EPaperState::POWER_ON);
  this->state_queue_.push(EPaperState::REFRESH_SCREEN);
  this->state_queue_.push(EPaperState::POWER_OFF);
  this->state_queue_.push(EPaperState::DEEP_SLEEP);
  this->state_queue_.push(EPaperState::IDLE);

  this->enable_loop();
}

void EPaperBase::loop() {
  if (this->waiting_for_idle_) {
    if (this->is_idle_()) {
      this->waiting_for_idle_ = false;
    } else {
      if (App.get_loop_component_start_time() - this->waiting_for_idle_last_print_ >= 1000) {
        ESP_LOGV(TAG, "Waiting for idle");
        this->waiting_for_idle_last_print_ = App.get_loop_component_start_time();
      }
      return;
    }
  }

  auto state = this->state_queue_.front();

  switch (state) {
    case EPaperState::IDLE:
      this->disable_loop();
      break;
    case EPaperState::UPDATE:
      this->do_update_();  // Calls ESPHome (current page) lambda
      break;
    case EPaperState::RESET:
      this->reset();
      break;
    case EPaperState::INITIALISE:
      this->initialise_();
      break;
    case EPaperState::TRANSFER_DATA:
      if (!this->transfer_data()) {
        return;  // Not done yet, come back next loop
      }
      break;
    case EPaperState::POWER_ON:
      this->power_on();
      break;
    case EPaperState::REFRESH_SCREEN:
      this->refresh_screen();
      break;
    case EPaperState::POWER_OFF:
      this->power_off();
      break;
    case EPaperState::DEEP_SLEEP:
      this->deep_sleep();
      break;
  }
  this->state_queue_.pop();
}

void EPaperBase::start_command_() {
  this->dc_pin_->digital_write(false);
  this->enable();
}

void EPaperBase::end_command_() { this->disable(); }

void EPaperBase::start_data_() {
  this->dc_pin_->digital_write(true);
  this->enable();
}
void EPaperBase::end_data_() { this->disable(); }

void EPaperBase::on_safe_shutdown() { this->deep_sleep(); }

void EPaperBase::initialise_() {
  size_t index = 0;
  const auto &sequence = this->init_sequence_;
  const size_t sequence_size = this->init_sequence_length_;
  while (index != sequence_size) {
    if (sequence_size - index < 2) {
      this->mark_failed("Malformed init sequence");
      return;
    }
    const auto *ptr = sequence + index;
    const uint8_t length = ptr[1];
    if (sequence_size - index < length + 2) {
      this->mark_failed("Malformed init sequence");
      return;
    }

    this->cmd_data(ptr);
    index += length + 2;
  }

  this->power_on();
}

}  // namespace esphome::epaper_spi
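The `[COMMAND, LENGTH, DATA...]` layout that `initialise_()` walks can be modeled compactly in Python; this sketch (illustrative, not repository code) applies the same two bounds checks:

```python
def parse_init_sequence(seq: bytes) -> list[tuple[int, bytes]]:
    """Walk a [command, length, data...] stream with the same validation as initialise_()."""
    entries: list[tuple[int, bytes]] = []
    index = 0
    while index != len(seq):
        if len(seq) - index < 2:
            raise ValueError("Malformed init sequence")
        command, length = seq[index], seq[index + 1]
        if len(seq) - index < length + 2:
            raise ValueError("Malformed init sequence")
        entries.append((command, seq[index + 2 : index + 2 + length]))
        index += length + 2
    return entries


# First entry of the 7.3in Spectra E6 INIT_SEQUENCE shown later in this diff:
seq = bytes([0xAA, 6, 0x49, 0x55, 0x20, 0x08, 0x09, 0x18, 0x01, 1, 0x3F])
assert parse_init_sequence(seq)[0] == (0xAA, bytes([0x49, 0x55, 0x20, 0x08, 0x09, 0x18]))
```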
esphome/components/epaper_spi/epaper_spi.h (new file, 93 lines)
@@ -0,0 +1,93 @@
#pragma once

#include "esphome/components/display/display_buffer.h"
#include "esphome/components/spi/spi.h"
#include "esphome/components/split_buffer/split_buffer.h"
#include "esphome/core/component.h"

#include <queue>

namespace esphome::epaper_spi {

enum class EPaperState : uint8_t {
  IDLE,
  UPDATE,
  RESET,
  INITIALISE,
  TRANSFER_DATA,
  POWER_ON,
  REFRESH_SCREEN,
  POWER_OFF,
  DEEP_SLEEP,
};

static const uint8_t MAX_TRANSFER_TIME = 10;  // Transfer in 10ms blocks to allow the loop to run

class EPaperBase : public display::DisplayBuffer,
                   public spi::SPIDevice<spi::BIT_ORDER_MSB_FIRST, spi::CLOCK_POLARITY_LOW, spi::CLOCK_PHASE_LEADING,
                                         spi::DATA_RATE_2MHZ> {
 public:
  EPaperBase(const uint8_t *init_sequence, const size_t init_sequence_length)
      : init_sequence_length_(init_sequence_length), init_sequence_(init_sequence) {}
  void set_dc_pin(GPIOPin *dc_pin) { dc_pin_ = dc_pin; }
  float get_setup_priority() const override;
  void set_reset_pin(GPIOPin *reset) { this->reset_pin_ = reset; }
  void set_busy_pin(GPIOPin *busy) { this->busy_pin_ = busy; }
  void set_reset_duration(uint32_t reset_duration) { this->reset_duration_ = reset_duration; }

  void command(uint8_t value);
  void data(uint8_t value);
  void cmd_data(const uint8_t *data);

  void update() override;
  void loop() override;

  void setup() override;

  void on_safe_shutdown() override;

 protected:
  bool is_idle_();
  void setup_pins_();
  virtual void reset();
  void initialise_();
  bool init_buffer_(size_t buffer_length);

  virtual int get_width_controller() { return this->get_width_internal(); };
  virtual void deep_sleep() = 0;
  /**
   * Send data to the device via SPI
   * @return true if done, false if should be called next loop
   */
  virtual bool transfer_data() = 0;
  virtual void refresh_screen() = 0;

  virtual void power_on() = 0;
  virtual void power_off() = 0;
  virtual uint32_t get_buffer_length() = 0;

  void start_command_();
  void end_command_();
  void start_data_();
  void end_data_();

  const size_t init_sequence_length_{0};

  size_t current_data_index_{0};
  uint32_t reset_duration_{200};
  uint32_t waiting_for_idle_last_print_{0};

  GPIOPin *dc_pin_;
  GPIOPin *busy_pin_{nullptr};
  GPIOPin *reset_pin_{nullptr};

  const uint8_t *init_sequence_{nullptr};

  bool waiting_for_idle_{false};

  split_buffer::SplitBuffer buffer_;

  std::queue<EPaperState> state_queue_{{EPaperState::IDLE}};
};

}  // namespace esphome::epaper_spi
@@ -0,0 +1,42 @@
#include "epaper_spi_model_7p3in_spectra_e6.h"

namespace esphome::epaper_spi {

static constexpr const char *const TAG = "epaper_spi.7.3in-spectra-e6";

void EPaper7p3InSpectraE6::power_on() {
  ESP_LOGI(TAG, "Power on");
  this->command(0x04);
  this->waiting_for_idle_ = true;
}

void EPaper7p3InSpectraE6::power_off() {
  ESP_LOGI(TAG, "Power off");
  this->command(0x02);
  this->data(0x00);
  this->waiting_for_idle_ = true;
}

void EPaper7p3InSpectraE6::refresh_screen() {
  ESP_LOGI(TAG, "Refresh");
  this->command(0x12);
  this->data(0x00);
  this->waiting_for_idle_ = true;
}

void EPaper7p3InSpectraE6::deep_sleep() {
  ESP_LOGI(TAG, "Deep sleep");
  this->command(0x07);
  this->data(0xA5);
}

void EPaper7p3InSpectraE6::dump_config() {
  LOG_DISPLAY("", "E-Paper SPI", this);
  ESP_LOGCONFIG(TAG, "  Model: 7.3in Spectra E6");
  LOG_PIN("  Reset Pin: ", this->reset_pin_);
  LOG_PIN("  DC Pin: ", this->dc_pin_);
  LOG_PIN("  Busy Pin: ", this->busy_pin_);
  LOG_UPDATE_INTERVAL(this);
}

}  // namespace esphome::epaper_spi
@@ -0,0 +1,45 @@
#pragma once

#include "epaper_spi_spectra_e6.h"

namespace esphome::epaper_spi {

class EPaper7p3InSpectraE6 : public EPaperSpectraE6 {
  static constexpr const uint16_t WIDTH = 800;
  static constexpr const uint16_t HEIGHT = 480;
  // clang-format off

  // Command, data length, data
  static constexpr uint8_t INIT_SEQUENCE[] = {
      0xAA, 6, 0x49, 0x55, 0x20, 0x08, 0x09, 0x18,
      0x01, 1, 0x3F,
      0x00, 2, 0x5F, 0x69,
      0x03, 4, 0x00, 0x54, 0x00, 0x44,
      0x05, 4, 0x40, 0x1F, 0x1F, 0x2C,
      0x06, 4, 0x6F, 0x1F, 0x17, 0x49,
      0x08, 4, 0x6F, 0x1F, 0x1F, 0x22,
      0x30, 1, 0x03,
      0x50, 1, 0x3F,
      0x60, 2, 0x02, 0x00,
      0x61, 4, WIDTH / 256, WIDTH % 256, HEIGHT / 256, HEIGHT % 256,
      0x84, 1, 0x01,
      0xE3, 1, 0x2F,
  };
  // clang-format on

 public:
  EPaper7p3InSpectraE6() : EPaperSpectraE6(INIT_SEQUENCE, sizeof(INIT_SEQUENCE)) {}

  void dump_config() override;

 protected:
  int get_width_internal() override { return WIDTH; };
  int get_height_internal() override { return HEIGHT; };

  void refresh_screen() override;
  void power_on() override;
  void power_off() override;
  void deep_sleep() override;
};

}  // namespace esphome::epaper_spi
esphome/components/epaper_spi/epaper_spi_spectra_e6.cpp (new file, 135 lines)
@@ -0,0 +1,135 @@
#include "epaper_spi_spectra_e6.h"

#include "esphome/core/log.h"

namespace esphome::epaper_spi {

static constexpr const char *const TAG = "epaper_spi.6c";

static inline uint8_t color_to_hex(Color color) {
  if (color.red > 127) {
    if (color.green > 170) {
      if (color.blue > 127) {
        return 0x1;  // White
      } else {
        return 0x2;  // Yellow
      }
    } else {
      return 0x3;  // Red (or Magenta)
    }
  } else {
    if (color.green > 127) {
      if (color.blue > 127) {
        return 0x5;  // Cyan -> Blue
      } else {
        return 0x6;  // Green
      }
    } else {
      if (color.blue > 127) {
        return 0x5;  // Blue
      } else {
        return 0x0;  // Black
      }
    }
  }
}
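The same threshold mapping is easy to experiment with off-device; here it is transliterated to Python (illustrative, not repository code):

```python
def color_to_hex(red: int, green: int, blue: int) -> int:
    """Map 8-bit RGB to the Spectra E6 3-bit palette index, mirroring the C++ above."""
    if red > 127:
        if green > 170:
            return 0x1 if blue > 127 else 0x2  # white / yellow
        return 0x3  # red (or magenta)
    if green > 127:
        return 0x5 if blue > 127 else 0x6  # cyan -> blue / green
    return 0x5 if blue > 127 else 0x0  # blue / black


assert color_to_hex(255, 255, 255) == 0x1  # white
assert color_to_hex(200, 200, 0) == 0x2    # yellow
assert color_to_hex(0, 0, 0) == 0x0        # black
```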
void EPaperSpectraE6::fill(Color color) {
  uint8_t pixel_color;
  if (color.is_on()) {
    pixel_color = color_to_hex(color);
  } else {
    pixel_color = 0x1;
  }

  // We store 8 bitset<3> in 3 bytes
  // | byte 1 | byte 2 | byte 3 |
  // |aaabbbaa|abbbaaab|bbaaabbb|
  uint8_t byte_1 = pixel_color << 5 | pixel_color << 2 | pixel_color >> 1;
  uint8_t byte_2 = pixel_color << 7 | pixel_color << 4 | pixel_color << 1 | pixel_color >> 2;
  uint8_t byte_3 = pixel_color << 6 | pixel_color << 3 | pixel_color << 0;

  const size_t buffer_length = this->get_buffer_length();
  for (size_t i = 0; i < buffer_length; i += 3) {
    this->buffer_[i + 0] = byte_1;
    this->buffer_[i + 1] = byte_2;
    this->buffer_[i + 2] = byte_3;
  }
}

uint32_t EPaperSpectraE6::get_buffer_length() {
  // 6 colors buffer, 1 pixel = 3 bits, we will store 8 pixels in 24 bits = 3 bytes
  return this->get_width_controller() * this->get_height_internal() / 8u * 3u;
}
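Since eight 3-bit pixels pack exactly into 3 bytes, the 7.3in panel's buffer works out to 800 * 480 / 8 * 3 = 144,000 bytes. The fill pattern can be sanity-checked off-device with a short Python sketch (illustrative):

```python
def pack_fill(p: int) -> tuple[int, int, int]:
    """Replicate fill(): one 3-bit palette index p repeated through a 3-byte pattern."""
    byte_1 = (p << 5 | p << 2 | p >> 1) & 0xFF  # aaabbbaa
    byte_2 = (p << 7 | p << 4 | p << 1 | p >> 2) & 0xFF  # abbbaaab
    byte_3 = (p << 6 | p << 3 | p) & 0xFF  # bbaaabbb
    return byte_1, byte_2, byte_3


# Concatenating the three bytes must yield the index p eight times in a row.
p = 0b101
packed = int.from_bytes(bytes(pack_fill(p)), "big")
assert all((packed >> shift) & 0b111 == p for shift in range(0, 24, 3))

# Buffer size formula for the 7.3in panel: width * height / 8 * 3
assert 800 * 480 // 8 * 3 == 144_000
```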
void HOT EPaperSpectraE6::draw_absolute_pixel_internal(int x, int y, Color color) {
  if (x >= this->get_width_internal() || y >= this->get_height_internal() || x < 0 || y < 0)
    return;

  uint8_t pixel_bits = color_to_hex(color);
  uint32_t pixel_position = x + y * this->get_width_controller();
  uint32_t first_bit_position = pixel_position * 3;
  uint32_t byte_position = first_bit_position / 8u;
  uint32_t byte_subposition = first_bit_position % 8u;

  if (byte_subposition <= 5) {
    this->buffer_[byte_position] = (this->buffer_[byte_position] & (0xFF ^ (0b111 << (5 - byte_subposition)))) |
                                   (pixel_bits << (5 - byte_subposition));
  } else {
    this->buffer_[byte_position] = (this->buffer_[byte_position] & (0xFF ^ (0b111 >> (byte_subposition - 5)))) |
                                   (pixel_bits >> (byte_subposition - 5));

    this->buffer_[byte_position + 1] =
        (this->buffer_[byte_position + 1] & (0xFF ^ (0xFF & (0b111 << (13 - byte_subposition))))) |
        (pixel_bits << (13 - byte_subposition));
  }
}
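The index arithmetic above is easier to follow in isolation: pixel n starts at bit 3*n, which lands in byte 3n/8 at bit offset 3n%8, and offsets 6 and 7 straddle a byte boundary, hence the two-byte write path. A Python sketch (illustrative):

```python
def pixel_location(x: int, y: int, width: int) -> tuple[int, int, bool]:
    """Locate the 3-bit field of pixel (x, y): (byte index, bit offset, straddles boundary?)."""
    first_bit = (x + y * width) * 3
    byte_pos, sub_pos = divmod(first_bit, 8)
    return byte_pos, sub_pos, sub_pos > 5


# Pixels 0 and 1 fit inside byte 0; pixel 2 occupies bits 6..8, crossing into byte 1.
assert pixel_location(0, 0, 800) == (0, 0, False)
assert pixel_location(1, 0, 800) == (0, 3, False)
assert pixel_location(2, 0, 800) == (0, 6, True)
```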
bool HOT EPaperSpectraE6::transfer_data() {
  const uint32_t start_time = App.get_loop_component_start_time();
  if (this->current_data_index_ == 0) {
    ESP_LOGV(TAG, "Sending data");
    this->command(0x10);
  }

  uint8_t bytes_to_send[4]{0};
  const size_t buffer_length = this->get_buffer_length();
  for (size_t i = this->current_data_index_; i < buffer_length; i += 3) {
    const uint32_t triplet = encode_uint24(this->buffer_[i + 0], this->buffer_[i + 1], this->buffer_[i + 2]);
    // 8 pixels are stored in 3 bytes
    // |aaabbbaa|abbbaaab|bbaaabbb|
    // | byte 1 | byte 2 | byte 3 |
    bytes_to_send[0] = ((triplet >> 17) & 0b01110000) | ((triplet >> 18) & 0b00000111);
    bytes_to_send[1] = ((triplet >> 11) & 0b01110000) | ((triplet >> 12) & 0b00000111);
    bytes_to_send[2] = ((triplet >> 5) & 0b01110000) | ((triplet >> 6) & 0b00000111);
    bytes_to_send[3] = ((triplet << 1) & 0b01110000) | ((triplet << 0) & 0b00000111);

    this->start_data_();
    this->write_array(bytes_to_send, sizeof(bytes_to_send));
    this->end_data_();

    if (millis() - start_time > MAX_TRANSFER_TIME) {
      // Let the main loop run and come back next loop
      this->current_data_index_ = i + 3;
      return false;
    }
  }
  // Finished the entire dataset
  this->current_data_index_ = 0;
  return true;
}
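`transfer_data()` re-encodes each 3-byte group (eight 3-bit pixels) into four bytes carrying one pixel per nibble, the layout the panel's 0x10 data command expects here. The shift pattern can be verified off-device (illustrative Python sketch):

```python
def expand_triplet(b0: int, b1: int, b2: int) -> list[int]:
    """Re-encode eight packed 3-bit pixels (3 bytes) as 4 bytes, one pixel per nibble."""
    triplet = (b0 << 16) | (b1 << 8) | b2  # mirrors encode_uint24()
    return [
        ((triplet >> 17) & 0b01110000) | ((triplet >> 18) & 0b00000111),
        ((triplet >> 11) & 0b01110000) | ((triplet >> 12) & 0b00000111),
        ((triplet >> 5) & 0b01110000) | ((triplet >> 6) & 0b00000111),
        ((triplet << 1) & 0b01110000) | (triplet & 0b00000111),
    ]


# With every pixel set to 0b101, each output byte holds 0x5 in both nibbles.
assert expand_triplet(0b10110110, 0b11011011, 0b01101101) == [0x55, 0x55, 0x55, 0x55]
```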
void EPaperSpectraE6::reset() {
  if (this->reset_pin_ != nullptr) {
    this->disable_loop();
    this->reset_pin_->digital_write(true);
    this->set_timeout(20, [this] {
      this->reset_pin_->digital_write(false);
      delay(2);
      this->reset_pin_->digital_write(true);
      this->set_timeout(20, [this] { this->enable_loop(); });
    });
  }
}

}  // namespace esphome::epaper_spi
esphome/components/epaper_spi/epaper_spi_spectra_e6.h (new file, 23 lines)
@@ -0,0 +1,23 @@
#pragma once

#include "epaper_spi.h"

namespace esphome::epaper_spi {

class EPaperSpectraE6 : public EPaperBase {
 public:
  EPaperSpectraE6(const uint8_t *init_sequence, const size_t init_sequence_length)
      : EPaperBase(init_sequence, init_sequence_length) {}

  display::DisplayType get_display_type() override { return display::DisplayType::DISPLAY_TYPE_COLOR; }
  void fill(Color color) override;

 protected:
  void draw_absolute_pixel_internal(int x, int y, Color color) override;
  uint32_t get_buffer_length() override;

  bool transfer_data() override;
  void reset() override;
};

}  // namespace esphome::epaper_spi
@@ -304,6 +304,17 @@ def _format_framework_espidf_version(ver: cv.Version, release: str) -> str:
     return f"pioarduino/framework-espidf@https://github.com/pioarduino/esp-idf/releases/download/v{str(ver)}/esp-idf-v{str(ver)}.zip"


+def _is_framework_url(source: str) -> bool:
+    # platformio accepts many URL schemes for framework repositories and archives including http, https, git, file, and symlink
+    import urllib.parse
+
+    try:
+        parsed = urllib.parse.urlparse(source)
+    except ValueError:
+        return False
+    return bool(parsed.scheme)
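Expected behavior of the helper on a few representative inputs (illustrative):

```python
# A framework source is treated as a URL exactly when urlparse finds a scheme.
assert _is_framework_url("https://github.com/pioarduino/esp-idf/releases/download/v5.5.1/esp-idf-v5.5.1.zip")
assert _is_framework_url("file:///tmp/esp-idf.zip")
assert not _is_framework_url("pioarduino/framework-espidf@5.5.1")  # plain PlatformIO spec, no scheme
```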
# NOTE: Keep this in mind when updating the recommended version:
# * New frameworks historically have had some regressions, especially for WiFi.
#   The new version needs to be thoroughly validated before changing the
@@ -314,11 +325,12 @@
# - https://github.com/espressif/arduino-esp32/releases
ARDUINO_FRAMEWORK_VERSION_LOOKUP = {
     "recommended": cv.Version(3, 2, 1),
-    "latest": cv.Version(3, 3, 1),
-    "dev": cv.Version(3, 3, 1),
+    "latest": cv.Version(3, 3, 2),
+    "dev": cv.Version(3, 3, 2),
}
ARDUINO_PLATFORM_VERSION_LOOKUP = {
-    cv.Version(3, 3, 1): cv.Version(55, 3, 31),
+    cv.Version(3, 3, 2): cv.Version(55, 3, 31, "1"),
+    cv.Version(3, 3, 1): cv.Version(55, 3, 31, "1"),
     cv.Version(3, 3, 0): cv.Version(55, 3, 30, "2"),
     cv.Version(3, 2, 1): cv.Version(54, 3, 21, "2"),
     cv.Version(3, 2, 0): cv.Version(54, 3, 20),
@@ -336,8 +348,8 @@ ESP_IDF_FRAMEWORK_VERSION_LOOKUP = {
     "dev": cv.Version(5, 5, 1),
}
ESP_IDF_PLATFORM_VERSION_LOOKUP = {
-    cv.Version(5, 5, 1): cv.Version(55, 3, 31),
-    cv.Version(5, 5, 0): cv.Version(55, 3, 31),
+    cv.Version(5, 5, 1): cv.Version(55, 3, 31, "1"),
+    cv.Version(5, 5, 0): cv.Version(55, 3, 31, "1"),
     cv.Version(5, 4, 2): cv.Version(54, 3, 21, "2"),
     cv.Version(5, 4, 1): cv.Version(54, 3, 21, "2"),
     cv.Version(5, 4, 0): cv.Version(54, 3, 21, "2"),
@@ -352,8 +364,8 @@ ESP_IDF_PLATFORM_VERSION_LOOKUP = {
# - https://github.com/pioarduino/platform-espressif32/releases
PLATFORM_VERSION_LOOKUP = {
     "recommended": cv.Version(54, 3, 21, "2"),
-    "latest": cv.Version(55, 3, 31),
-    "dev": "https://github.com/pioarduino/platform-espressif32.git#develop",
+    "latest": cv.Version(55, 3, 31, "1"),
+    "dev": cv.Version(55, 3, 31, "1"),
}
@@ -386,6 +398,10 @@ def _check_versions(value):
         value[CONF_SOURCE] = value.get(
             CONF_SOURCE, _format_framework_arduino_version(version)
         )
+        if _is_framework_url(value[CONF_SOURCE]):
+            value[CONF_SOURCE] = (
+                f"pioarduino/framework-arduinoespressif32@{value[CONF_SOURCE]}"
+            )
     else:
         if version < cv.Version(5, 0, 0):
             raise cv.Invalid("Only ESP-IDF 5.0+ is supported.")
@@ -395,6 +411,8 @@ def _check_versions(value):
             CONF_SOURCE,
             _format_framework_espidf_version(version, value.get(CONF_RELEASE, None)),
         )
+        if _is_framework_url(value[CONF_SOURCE]):
+            value[CONF_SOURCE] = f"pioarduino/framework-espidf@{value[CONF_SOURCE]}"

     if CONF_PLATFORM_VERSION not in value:
         if platform_lookup is None:
@@ -639,6 +657,7 @@ def _show_framework_migration_message(name: str, variant: str) -> None:
         + "Why change? ESP-IDF offers:\n"
         + color(AnsiFore.GREEN, "  ✨ Up to 40% smaller binaries\n")
         + color(AnsiFore.GREEN, "  🚀 Better performance and optimization\n")
+        + color(AnsiFore.GREEN, "  ⚡ 2-3x faster compile times\n")
         + color(AnsiFore.GREEN, "  📦 Custom-built firmware for your exact needs\n")
         + color(
             AnsiFore.GREEN,
@@ -646,7 +665,6 @@ def _show_framework_migration_message(name: str, variant: str) -> None:
         )
         + "\n"
         + "Trade-offs:\n"
-        + color(AnsiFore.YELLOW, "  ⏱️ Compile times are ~25% longer\n")
         + color(AnsiFore.YELLOW, "  🔄 Some components need migration\n")
         + "\n"
         + "What should I do?\n"
@@ -285,6 +285,10 @@ def consume_connection_slots(


 def validate_connection_slots(max_connections: int) -> None:
     """Validate that BLE connection slots don't exceed the configured maximum."""
+    # Skip validation in testing mode to allow component grouping
+    if CORE.testing_mode:
+        return
+
     ble_data = CORE.data.get(KEY_ESP32_BLE, {})
     used_slots = ble_data.get(KEY_USED_CONNECTION_SLOTS, [])
     num_used = len(used_slots)
@@ -332,12 +336,16 @@ def final_validation(config):

     # Check if BLE Server is needed
     has_ble_server = "esp32_ble_server" in full_config
-    add_idf_sdkconfig_option("CONFIG_BT_GATTS_ENABLE", has_ble_server)

     # Check if BLE Client is needed (via esp32_ble_tracker or esp32_ble_client)
     has_ble_client = (
         "esp32_ble_tracker" in full_config or "esp32_ble_client" in full_config
     )

+    # ESP-IDF BLE stack requires GATT Server to be enabled when GATT Client is enabled
+    # This is an internal dependency in the Bluedroid stack (tested ESP-IDF 5.4.2-5.5.1)
+    # See: https://github.com/espressif/esp-idf/issues/17724
+    add_idf_sdkconfig_option("CONFIG_BT_GATTS_ENABLE", has_ble_server or has_ble_client)
     add_idf_sdkconfig_option("CONFIG_BT_GATTC_ENABLE", has_ble_client)

     # Handle max_connections: check for deprecated location in esp32_ble_tracker
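The dependency can be read as a small truth table: GATTC follows the client flag, while GATTS is forced on whenever either a server or a client is configured (illustrative):

```python
# Resulting sdkconfig options for the four server/client combinations.
for has_server in (False, True):
    for has_client in (False, True):
        gatts = has_server or has_client  # server stack forced on by the client dependency
        gattc = has_client
        print(f"server={has_server!s:5} client={has_client!s:5} -> GATTS={gatts} GATTC={gattc}")
```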
@@ -68,10 +68,6 @@ void ESP32BLE::advertising_set_service_data(const std::vector<uint8_t> &data) {
 }

-void ESP32BLE::advertising_set_manufacturer_data(const std::vector<uint8_t> &data) {
-  this->advertising_set_manufacturer_data(std::span<const uint8_t>(data));
-}
-
 void ESP32BLE::advertising_set_manufacturer_data(std::span<const uint8_t> data) {
   this->advertising_init_();
   this->advertising_->set_manufacturer_data(data);
   this->advertising_start();
@@ -118,7 +118,6 @@ class ESP32BLE : public Component {
   void advertising_start();
   void advertising_set_service_data(const std::vector<uint8_t> &data);
-  void advertising_set_manufacturer_data(const std::vector<uint8_t> &data);
   void advertising_set_manufacturer_data(std::span<const uint8_t> data);
   void advertising_set_appearance(uint16_t appearance) { this->appearance_ = appearance; }
   void advertising_set_service_data_and_name(std::span<const uint8_t> data, bool include_name);
   void advertising_add_service_uuid(ESPBTUUID uuid);

@@ -59,10 +59,6 @@ void BLEAdvertising::set_service_data(const std::vector<uint8_t> &data) {
 }

-void BLEAdvertising::set_manufacturer_data(const std::vector<uint8_t> &data) {
-  this->set_manufacturer_data(std::span<const uint8_t>(data));
-}
-
 void BLEAdvertising::set_manufacturer_data(std::span<const uint8_t> data) {
   delete[] this->advertising_data_.p_manufacturer_data;
   this->advertising_data_.p_manufacturer_data = nullptr;
   this->advertising_data_.manufacturer_len = data.size();

@@ -35,7 +35,6 @@ class BLEAdvertising {
   void set_scan_response(bool scan_response) { this->scan_response_ = scan_response; }
   void set_min_preferred_interval(uint16_t interval) { this->advertising_data_.min_interval = interval; }
-  void set_manufacturer_data(const std::vector<uint8_t> &data);
   void set_manufacturer_data(std::span<const uint8_t> data);
   void set_appearance(uint16_t appearance) { this->advertising_data_.appearance = appearance; }
   void set_service_data(const std::vector<uint8_t> &data);
   void set_service_data(std::span<const uint8_t> data);
@@ -1,6 +1,5 @@
 #include "esp32_ble_beacon.h"
 #include "esphome/core/log.h"
-#include "esphome/core/helpers.h"

 #ifdef USE_ESP32

@@ -15,10 +14,6 @@
 #include "esphome/core/hal.h"
 #include "esphome/core/helpers.h"

-#ifdef USE_ARDUINO
-#include <esp32-hal-bt.h>
-#endif
-
 namespace esphome {
 namespace esp32_ble_beacon {
@@ -15,10 +15,7 @@ Trigger<std::vector<uint8_t>, uint16_t> *BLETriggers::create_characteristic_on_w
   Trigger<std::vector<uint8_t>, uint16_t> *on_write_trigger =  // NOLINT(cppcoreguidelines-owning-memory)
       new Trigger<std::vector<uint8_t>, uint16_t>();
   characteristic->on_write([on_write_trigger](std::span<const uint8_t> data, uint16_t id) {
-    // Convert span to vector for trigger - copy is necessary because:
-    // 1. Trigger stores the data for use in automation actions that execute later
-    // 2. The span is only valid during this callback (points to temporary BLE stack data)
-    // 3. User lambdas in automations need persistent data they can access asynchronously
+    // Convert span to vector for trigger
     on_write_trigger->trigger(std::vector<uint8_t>(data.begin(), data.end()), id);
   });
   return on_write_trigger;
@@ -30,10 +27,7 @@ Trigger<std::vector<uint8_t>, uint16_t> *BLETriggers::create_descriptor_on_write
   Trigger<std::vector<uint8_t>, uint16_t> *on_write_trigger =  // NOLINT(cppcoreguidelines-owning-memory)
       new Trigger<std::vector<uint8_t>, uint16_t>();
   descriptor->on_write([on_write_trigger](std::span<const uint8_t> data, uint16_t id) {
-    // Convert span to vector for trigger - copy is necessary because:
-    // 1. Trigger stores the data for use in automation actions that execute later
-    // 2. The span is only valid during this callback (points to temporary BLE stack data)
-    // 3. User lambdas in automations need persistent data they can access asynchronously
+    // Convert span to vector for trigger
     on_write_trigger->trigger(std::vector<uint8_t>(data.begin(), data.end()), id);
   });
   return on_write_trigger;
@@ -1,5 +1,6 @@
 from __future__ import annotations

+from dataclasses import dataclass
 import logging

 from esphome import automation
@@ -52,9 +53,19 @@ class BLEFeatures(StrEnum):
     ESP_BT_DEVICE = "ESP_BT_DEVICE"


+# Dataclass for registration counts
+@dataclass
+class RegistrationCounts:
+    listeners: int = 0
+    clients: int = 0
+
+
 # Set to track which features are needed by components
 _required_features: set[BLEFeatures] = set()

+# Track registration counts for StaticVector sizing
+_registration_counts = RegistrationCounts()
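These module-level counters are bumped during code generation and, as later hunks in this file show, turned into compile-time defines that size the tracker's StaticVector storage. Conceptually (illustrative sketch, not repository code):

```python
# Conceptual flow: counts collected at codegen time become optional defines.
counts = RegistrationCounts()
counts.listeners += 1  # e.g. one on_ble_advertise trigger
counts.clients += 1    # e.g. one registered BLE client

defines: dict[str, int] = {}
if counts.listeners > 0:
    defines["ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT"] = counts.listeners
if counts.clients > 0:
    defines["ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT"] = counts.clients
assert defines == {
    "ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT": 1,
    "ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT": 1,
}
```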

 def register_ble_features(features: set[BLEFeatures]) -> None:
     """Register BLE features that a component needs.
@@ -257,12 +268,14 @@ async def to_code(config):
     register_ble_features({BLEFeatures.ESP_BT_DEVICE})

     for conf in config.get(CONF_ON_BLE_ADVERTISE, []):
+        _registration_counts.listeners += 1
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         if CONF_MAC_ADDRESS in conf:
             addr_list = [it.as_hex for it in conf[CONF_MAC_ADDRESS]]
             cg.add(trigger.set_addresses(addr_list))
         await automation.build_automation(trigger, [(ESPBTDeviceConstRef, "x")], conf)
     for conf in config.get(CONF_ON_BLE_SERVICE_DATA_ADVERTISE, []):
+        _registration_counts.listeners += 1
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         if len(conf[CONF_SERVICE_UUID]) == len(bt_uuid16_format):
             cg.add(trigger.set_service_uuid16(as_hex(conf[CONF_SERVICE_UUID])))
@@ -275,6 +288,7 @@ async def to_code(config):
             cg.add(trigger.set_address(conf[CONF_MAC_ADDRESS].as_hex))
         await automation.build_automation(trigger, [(adv_data_t_const_ref, "x")], conf)
     for conf in config.get(CONF_ON_BLE_MANUFACTURER_DATA_ADVERTISE, []):
+        _registration_counts.listeners += 1
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         if len(conf[CONF_MANUFACTURER_ID]) == len(bt_uuid16_format):
             cg.add(trigger.set_manufacturer_uuid16(as_hex(conf[CONF_MANUFACTURER_ID])))
@@ -287,6 +301,7 @@ async def to_code(config):
             cg.add(trigger.set_address(conf[CONF_MAC_ADDRESS].as_hex))
         await automation.build_automation(trigger, [(adv_data_t_const_ref, "x")], conf)
     for conf in config.get(CONF_ON_SCAN_END, []):
+        _registration_counts.listeners += 1
         trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
         await automation.build_automation(trigger, [], conf)
@@ -320,6 +335,17 @@ async def _add_ble_features():
|
||||
cg.add_define("USE_ESP32_BLE_DEVICE")
|
||||
cg.add_define("USE_ESP32_BLE_UUID")
|
||||
|
||||
# Add defines for StaticVector sizing based on registration counts
|
||||
# Only define if count > 0 to avoid allocating unnecessary memory
|
||||
if _registration_counts.listeners > 0:
|
||||
cg.add_define(
|
||||
"ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT", _registration_counts.listeners
|
||||
)
|
||||
if _registration_counts.clients > 0:
|
||||
cg.add_define(
|
||||
"ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT", _registration_counts.clients
|
||||
)
|
||||
|
||||
|
||||
ESP32_BLE_START_SCAN_ACTION_SCHEMA = cv.Schema(
|
||||
{
|
||||
@@ -369,6 +395,7 @@ async def register_ble_device(
|
||||
var: cg.SafeExpType, config: ConfigType
|
||||
) -> cg.SafeExpType:
|
||||
register_ble_features({BLEFeatures.ESP_BT_DEVICE})
|
||||
_registration_counts.listeners += 1
|
||||
paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
|
||||
cg.add(paren.register_listener(var))
|
||||
return var
|
||||
@@ -376,6 +403,7 @@ async def register_ble_device(
|
||||
|
||||
async def register_client(var: cg.SafeExpType, config: ConfigType) -> cg.SafeExpType:
|
||||
register_ble_features({BLEFeatures.ESP_BT_DEVICE})
|
||||
_registration_counts.clients += 1
|
||||
paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
|
||||
cg.add(paren.register_client(var))
|
||||
return var
|
||||
@@ -389,6 +417,7 @@ async def register_raw_ble_device(
|
||||
This does NOT register the ESP_BT_DEVICE feature, meaning ESPBTDevice
|
||||
will not be compiled in if this is the only registration method used.
|
||||
"""
|
||||
_registration_counts.listeners += 1
|
||||
paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
|
||||
cg.add(paren.register_listener(var))
|
||||
return var
|
||||
@@ -402,6 +431,7 @@ async def register_raw_client(
|
||||
This does NOT register the ESP_BT_DEVICE feature, meaning ESPBTDevice
|
||||
will not be compiled in if this is the only registration method used.
|
||||
"""
|
||||
_registration_counts.clients += 1
|
||||
paren = await cg.get_variable(config[CONF_ESP32_BLE_ID])
|
||||
cg.add(paren.register_client(var))
|
||||
return var
|
||||
|
@@ -25,10 +25,6 @@
#include <esp_coexist.h>
#endif

#ifdef USE_ARDUINO
#include <esp32-hal-bt.h>
#endif

#define MBEDTLS_AES_ALT
#include <aes_alt.h>

@@ -78,9 +74,11 @@ void ESP32BLETracker::setup() {
      [this](ota::OTAState state, float progress, uint8_t error, ota::OTAComponent *comp) {
        if (state == ota::OTA_STARTED) {
          this->stop_scan();
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
          for (auto *client : this->clients_) {
            client->disconnect();
          }
#endif
        }
      });
#endif
@@ -210,8 +208,10 @@ void ESP32BLETracker::start_scan_(bool first) {
  this->set_scanner_state_(ScannerState::STARTING);
  ESP_LOGD(TAG, "Starting scan, set scanner state to STARTING.");
  if (!first) {
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
    for (auto *listener : this->listeners_)
      listener->on_scan_end();
#endif
  }
#ifdef USE_ESP32_BLE_DEVICE
  this->already_discovered_.clear();
@@ -240,20 +240,25 @@ void ESP32BLETracker::start_scan_(bool first) {
}

void ESP32BLETracker::register_client(ESPBTClient *client) {
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
  client->app_id = ++this->app_id_;
  this->clients_.push_back(client);
  this->recalculate_advertisement_parser_types();
#endif
}

void ESP32BLETracker::register_listener(ESPBTDeviceListener *listener) {
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
  listener->set_parent(this);
  this->listeners_.push_back(listener);
  this->recalculate_advertisement_parser_types();
#endif
}

void ESP32BLETracker::recalculate_advertisement_parser_types() {
  this->raw_advertisements_ = false;
  this->parse_advertisements_ = false;
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
  for (auto *listener : this->listeners_) {
    if (listener->get_advertisement_parser_type() == AdvertisementParserType::PARSED_ADVERTISEMENTS) {
      this->parse_advertisements_ = true;
@@ -261,6 +266,8 @@ void ESP32BLETracker::recalculate_advertisement_parser_types() {
      this->raw_advertisements_ = true;
    }
  }
#endif
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
  for (auto *client : this->clients_) {
    if (client->get_advertisement_parser_type() == AdvertisementParserType::PARSED_ADVERTISEMENTS) {
      this->parse_advertisements_ = true;
@@ -268,6 +275,7 @@ void ESP32BLETracker::recalculate_advertisement_parser_types() {
      this->raw_advertisements_ = true;
    }
  }
#endif
}

void ESP32BLETracker::gap_event_handler(esp_gap_ble_cb_event_t event, esp_ble_gap_cb_param_t *param) {
@@ -286,10 +294,12 @@ void ESP32BLETracker::gap_event_handler(esp_gap_ble_cb_event_t event, esp_ble_ga
    default:
      break;
  }
  // Forward all events to clients (scan results are handled separately via gap_scan_event_handler)
// Forward all events to clients (scan results are handled separately via gap_scan_event_handler)
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
  for (auto *client : this->clients_) {
    client->gap_event_handler(event, param);
  }
#endif
}

void ESP32BLETracker::gap_scan_event_handler(const BLEScanResult &scan_result) {
@@ -352,9 +362,11 @@ void ESP32BLETracker::gap_scan_stop_complete_(const esp_ble_gap_cb_param_t::ble_

void ESP32BLETracker::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t gattc_if,
                                          esp_ble_gattc_cb_param_t *param) {
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
  for (auto *client : this->clients_) {
    client->gattc_event_handler(event, gattc_if, param);
  }
#endif
}

void ESP32BLETracker::set_scanner_state_(ScannerState state) {
@@ -708,12 +720,16 @@ bool ESPBTDevice::resolve_irk(const uint8_t *irk) const {
void ESP32BLETracker::process_scan_result_(const BLEScanResult &scan_result) {
  // Process raw advertisements
  if (this->raw_advertisements_) {
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
    for (auto *listener : this->listeners_) {
      listener->parse_devices(&scan_result, 1);
    }
#endif
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
    for (auto *client : this->clients_) {
      client->parse_devices(&scan_result, 1);
    }
#endif
  }

  // Process parsed advertisements
@@ -723,16 +739,20 @@ void ESP32BLETracker::process_scan_result_(const BLEScanResult &scan_result) {
    device.parse_scan_rst(scan_result);

    bool found = false;
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
    for (auto *listener : this->listeners_) {
      if (listener->parse_device(device))
        found = true;
    }
#endif

#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
    for (auto *client : this->clients_) {
      if (client->parse_device(device)) {
        found = true;
      }
    }
#endif

    if (!found && !this->scan_continuous_) {
      this->print_bt_device_info(device);
@@ -749,8 +769,10 @@ void ESP32BLETracker::cleanup_scan_state_(bool is_stop_complete) {
  // Reset timeout state machine instead of cancelling scheduler timeout
  this->scan_timeout_state_ = ScanTimeoutState::INACTIVE;

#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
  for (auto *listener : this->listeners_)
    listener->on_scan_end();
#endif

  this->set_scanner_state_(ScannerState::IDLE);
}
@@ -774,6 +796,7 @@ void ESP32BLETracker::handle_scanner_failure_() {

void ESP32BLETracker::try_promote_discovered_clients_() {
  // Only promote the first discovered client to avoid multiple simultaneous connections
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
  for (auto *client : this->clients_) {
    if (client->state() != ClientState::DISCOVERED) {
      continue;
@@ -795,6 +818,7 @@ void ESP32BLETracker::try_promote_discovered_clients_() {
    client->connect();
    break;
  }
#endif
}

const char *ESP32BLETracker::scanner_state_to_string_(ScannerState state) const {
@@ -302,6 +302,7 @@ class ESP32BLETracker : public Component,
  /// Count clients in each state
  ClientStateCounts count_client_states_() const {
    ClientStateCounts counts;
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
    for (auto *client : this->clients_) {
      switch (client->state()) {
        case ClientState::DISCONNECTING:
@@ -317,12 +318,17 @@ class ESP32BLETracker : public Component,
          break;
      }
    }
#endif
    return counts;
  }

  // Group 1: Large objects (12+ bytes) - vectors and callback manager
  std::vector<ESPBTDeviceListener *> listeners_;
  std::vector<ESPBTClient *> clients_;
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
  StaticVector<ESPBTDeviceListener *, ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT> listeners_;
#endif
#ifdef ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT
  StaticVector<ESPBTClient *, ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT> clients_;
#endif
  CallbackManager<void(ScannerState)> scanner_state_callbacks_;
#ifdef USE_ESP32_BLE_DEVICE
  /// Vector of addresses that have already been printed in print_bt_device_info
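
The header change above is the other half of the codegen counts emitted from `__init__.py`: when a count define is present, the heap-backed `std::vector` members are replaced by fixed-capacity containers sized at compile time, and when it is absent, the members and every `#ifdef`-guarded loop over them drop out of the build entirely. ESPHome's real `StaticVector` is not shown in this diff; a minimal sketch of the idea, under that assumption:

```cpp
#include <array>
#include <cstddef>

// Minimal fixed-capacity vector sketch; the actual StaticVector differs.
template<typename T, size_t N> class StaticVectorSketch {
 public:
  void push_back(const T &v) {
    if (size_ < N)
      data_[size_++] = v;  // capacity is a compile-time constant, no heap use
  }
  T *begin() { return data_.data(); }
  T *end() { return data_.data() + size_; }

 private:
  std::array<T, N> data_{};
  size_t size_{0};
};

// The codegen-emitted define picks the capacity; with zero registrations
// the define is never emitted and this member compiles away.
#ifdef ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT
static StaticVectorSketch<void *, ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT> listeners;
#endif
```
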
@@ -107,7 +107,7 @@ void IDFI2CBus::dump_config() {
    if (s.second) {
      ESP_LOGCONFIG(TAG, "Found device at address 0x%02X", s.first);
    } else {
      ESP_LOGCONFIG(TAG, "Unknown error at address 0x%02X", s.first);
      ESP_LOGE(TAG, "Unknown error at address 0x%02X", s.first);
    }
  }
}
@@ -8,6 +8,13 @@ namespace json {

static const char *const TAG = "json";

#ifdef USE_PSRAM
// Global allocator that outlives all JsonDocuments returned by parse_json()
// This prevents dangling pointer issues when JsonDocuments are returned from functions
// NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables) - Must be mutable for ArduinoJson::Allocator
static SpiRamAllocator global_json_allocator;
#endif

std::string build_json(const json_build_t &f) {
  // NOLINTBEGIN(clang-analyzer-cplusplus.NewDeleteLeaks) false positive with ArduinoJson
  JsonBuilder builder;
@@ -33,8 +40,7 @@ JsonDocument parse_json(const uint8_t *data, size_t len) {
    return JsonObject();  // return unbound object
  }
#ifdef USE_PSRAM
  auto doc_allocator = SpiRamAllocator();
  JsonDocument json_document(&doc_allocator);
  JsonDocument json_document(&global_json_allocator);
#else
  JsonDocument json_document;
#endif
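
The reasoning in the comments is about allocator lifetime: ArduinoJson's `JsonDocument` keeps the allocator pointer it was constructed with and calls it again on growth and destruction, so a stack-local allocator dangles as soon as the document is returned to the caller. A sketch of the two lifetimes, assuming ArduinoJson 7's `Allocator` interface:

```cpp
#include <ArduinoJson.h>
#include <cstdlib>

struct MyAllocator : ArduinoJson::Allocator {
  void *allocate(size_t n) override { return malloc(n); }
  void deallocate(void *p) override { free(p); }
  void *reallocate(void *p, size_t n) override { return realloc(p, n); }
};

static MyAllocator global_alloc;  // outlives every document

JsonDocument bad() {
  MyAllocator local;
  JsonDocument doc(&local);
  return doc;  // the returned document still points at `local` - dangling
}

JsonDocument good() {
  JsonDocument doc(&global_alloc);  // allocator outlives the document
  return doc;
}
```
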
@@ -56,7 +56,7 @@ void MCP23016::pin_mode(uint8_t pin, gpio::Flags flags) {
    this->update_reg_(pin, false, iodir);
  }
}
float MCP23016::get_setup_priority() const { return setup_priority::IO; }
float MCP23016::get_setup_priority() const { return setup_priority::HARDWARE; }
bool MCP23016::read_reg_(uint8_t reg, uint8_t *value) {
  if (this->is_failed())
    return false;
@@ -21,11 +21,11 @@ template<uint8_t N> class MCP23XXXBase : public Component, public gpio_expander:

 protected:
  // read a given register
  virtual bool read_reg(uint8_t reg, uint8_t *value);
  virtual bool read_reg(uint8_t reg, uint8_t *value) = 0;
  // write a value to a given register
  virtual bool write_reg(uint8_t reg, uint8_t value);
  virtual bool write_reg(uint8_t reg, uint8_t value) = 0;
  // update registers with given pin value.
  virtual void update_reg(uint8_t pin, bool pin_value, uint8_t reg_a);
  virtual void update_reg(uint8_t pin, bool pin_value, uint8_t reg_a) = 0;

  bool open_drain_ints_;
};
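
Adding `= 0` turns virtuals that were declared but never defined into pure virtuals: the base class becomes abstract, and a subclass that forgets an override now fails at compile time instead of producing vtable or link errors. A minimal illustration:

```cpp
#include <cstdint>

class Base {
 protected:
  // Pure virtual: Base is abstract and needs no definition for this method.
  virtual bool read_reg(uint8_t reg, uint8_t *value) = 0;
};

class Impl : public Base {
 protected:
  bool read_reg(uint8_t reg, uint8_t *value) override {
    *value = 0;
    return true;
  }
};

// A derived class that omits read_reg() is itself abstract and cannot be
// instantiated, so the mistake surfaces at compile time.
```
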
@@ -11,7 +11,7 @@ from esphome.const import (
    CONF_SERVICES,
    PlatformFramework,
)
from esphome.core import CORE, coroutine_with_priority
from esphome.core import CORE, Lambda, coroutine_with_priority
from esphome.coroutine import CoroPriority

CODEOWNERS = ["@esphome/core"]
@@ -58,21 +58,68 @@ CONFIG_SCHEMA = cv.All(
)


def mdns_txt_record(key: str, value: str):
    return cg.StructInitializer(
        MDNSTXTRecord,
        ("key", key),
        ("value", value),
def mdns_txt_record(key: str, value: str) -> cg.RawExpression:
    """Create a mDNS TXT record.

    Public API for external components. Do not remove.

    Args:
        key: The TXT record key
        value: The TXT record value (static string only)

    Returns:
        A RawExpression representing a MDNSTXTRecord struct
    """
    return cg.RawExpression(
        f"{{MDNS_STR({cg.safe_exp(key)}), MDNS_STR({cg.safe_exp(value)})}}"
    )


async def _mdns_txt_record_templated(
    mdns_comp: cg.Pvariable, key: str, value: Lambda | str
) -> cg.RawExpression:
    """Create a mDNS TXT record with support for templated values.

    Internal helper function.

    Args:
        mdns_comp: The MDNSComponent instance (from cg.get_variable())
        key: The TXT record key
        value: The TXT record value (can be a static string or a lambda template)

    Returns:
        A RawExpression representing a MDNSTXTRecord struct
    """
    if not cg.is_template(value):
        # It's a static string - use directly in flash, no need to store in vector
        return mdns_txt_record(key, value)
    # It's a lambda - evaluate and store using helper
    templated_value = await cg.templatable(value, [], cg.std_string)
    safe_key = cg.safe_exp(key)
    dynamic_call = f"{mdns_comp}->add_dynamic_txt_value(({templated_value})())"
    return cg.RawExpression(f"{{MDNS_STR({safe_key}), MDNS_STR({dynamic_call})}}")


def mdns_service(
    service: str, proto: str, port: int, txt_records: list[dict[str, str]]
):
    service: str, proto: str, port: int, txt_records: list[cg.RawExpression]
) -> cg.StructInitializer:
    """Create a mDNS service.

    Public API for external components. Do not remove.

    Args:
        service: Service name (e.g., "_http")
        proto: Protocol (e.g., "_tcp" or "_udp")
        port: Port number
        txt_records: List of MDNSTXTRecord expressions

    Returns:
        A StructInitializer representing a MDNSService struct
    """
    return cg.StructInitializer(
        MDNSService,
        ("service_type", service),
        ("proto", proto),
        ("service_type", cg.RawExpression(f"MDNS_STR({cg.safe_exp(service)})")),
        ("proto", cg.RawExpression(f"MDNS_STR({cg.safe_exp(proto)})")),
        ("port", port),
        ("txt_records", txt_records),
    )
@@ -107,23 +154,37 @@ async def to_code(config):
    # Ensure at least 1 service (fallback service)
    cg.add_define("MDNS_SERVICE_COUNT", max(1, service_count))

    # Calculate compile-time dynamic TXT value count
    # Dynamic values are those that cannot be stored in flash at compile time
    dynamic_txt_count = 0
    if "api" in CORE.config:
        # Always: get_mac_address()
        dynamic_txt_count += 1
    # User-provided templatable TXT values (only lambdas, not static strings)
    dynamic_txt_count += sum(
        1
        for service in config[CONF_SERVICES]
        for txt_value in service[CONF_TXT].values()
        if cg.is_template(txt_value)
    )

    # Ensure at least 1 to avoid zero-size array
    cg.add_define("MDNS_DYNAMIC_TXT_COUNT", max(1, dynamic_txt_count))

    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)

    for service in config[CONF_SERVICES]:
        txt = [
            cg.StructInitializer(
                MDNSTXTRecord,
                ("key", txt_key),
                ("value", await cg.templatable(txt_value, [], cg.std_string)),
            )
        txt_records = [
            await _mdns_txt_record_templated(var, txt_key, txt_value)
            for txt_key, txt_value in service[CONF_TXT].items()
        ]

        exp = mdns_service(
            service[CONF_SERVICE],
            service[CONF_PROTOCOL],
            await cg.templatable(service[CONF_PORT], [], cg.uint16),
            txt,
            txt_records,
        )

        cg.add(var.add_extra_service(exp))
@@ -9,24 +9,9 @@
#include <pgmspace.h>
// Macro to define strings in PROGMEM on ESP8266, regular memory on other platforms
#define MDNS_STATIC_CONST_CHAR(name, value) static const char name[] PROGMEM = value
// Helper to get string from PROGMEM - returns a temporary std::string
// Only define this function if we have services that will use it
#if defined(USE_API) || defined(USE_PROMETHEUS) || defined(USE_WEBSERVER) || defined(USE_MDNS_EXTRA_SERVICES)
static std::string mdns_string_p(const char *src) {
  char buf[64];
  strncpy_P(buf, src, sizeof(buf) - 1);
  buf[sizeof(buf) - 1] = '\0';
  return std::string(buf);
}
#define MDNS_STR(name) mdns_string_p(name)
#else
// If no services are configured, we still need the fallback service but it uses string literals
#define MDNS_STR(name) std::string(name)
#endif
#else
// On non-ESP8266 platforms, use regular const char*
#define MDNS_STATIC_CONST_CHAR(name, value) static constexpr const char *name = value
#define MDNS_STR(name) name
#define MDNS_STATIC_CONST_CHAR(name, value) static constexpr const char name[] = value
#endif

#ifdef USE_API
@@ -46,30 +31,10 @@ static const char *const TAG = "mdns";
#endif

// Define all constant strings using the macro
MDNS_STATIC_CONST_CHAR(SERVICE_ESPHOMELIB, "_esphomelib");
MDNS_STATIC_CONST_CHAR(SERVICE_TCP, "_tcp");
MDNS_STATIC_CONST_CHAR(SERVICE_PROMETHEUS, "_prometheus-http");
MDNS_STATIC_CONST_CHAR(SERVICE_HTTP, "_http");

MDNS_STATIC_CONST_CHAR(TXT_FRIENDLY_NAME, "friendly_name");
MDNS_STATIC_CONST_CHAR(TXT_VERSION, "version");
MDNS_STATIC_CONST_CHAR(TXT_MAC, "mac");
MDNS_STATIC_CONST_CHAR(TXT_PLATFORM, "platform");
MDNS_STATIC_CONST_CHAR(TXT_BOARD, "board");
MDNS_STATIC_CONST_CHAR(TXT_NETWORK, "network");
MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION, "api_encryption");
MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION_SUPPORTED, "api_encryption_supported");
MDNS_STATIC_CONST_CHAR(TXT_PROJECT_NAME, "project_name");
MDNS_STATIC_CONST_CHAR(TXT_PROJECT_VERSION, "project_version");
MDNS_STATIC_CONST_CHAR(TXT_PACKAGE_IMPORT_URL, "package_import_url");

MDNS_STATIC_CONST_CHAR(PLATFORM_ESP8266, "ESP8266");
MDNS_STATIC_CONST_CHAR(PLATFORM_ESP32, "ESP32");
MDNS_STATIC_CONST_CHAR(PLATFORM_RP2040, "RP2040");

MDNS_STATIC_CONST_CHAR(NETWORK_WIFI, "wifi");
MDNS_STATIC_CONST_CHAR(NETWORK_ETHERNET, "ethernet");
MDNS_STATIC_CONST_CHAR(NETWORK_THREAD, "thread");
// Wrap build-time defines into flash storage
MDNS_STATIC_CONST_CHAR(VALUE_VERSION, ESPHOME_VERSION);

void MDNSComponent::compile_records_() {
  this->hostname_ = App.get_name();
@@ -78,6 +43,15 @@ void MDNSComponent::compile_records_() {
  // in mdns/__init__.py. If you add a new service here, update both locations.

#ifdef USE_API
  MDNS_STATIC_CONST_CHAR(SERVICE_ESPHOMELIB, "_esphomelib");
  MDNS_STATIC_CONST_CHAR(TXT_FRIENDLY_NAME, "friendly_name");
  MDNS_STATIC_CONST_CHAR(TXT_VERSION, "version");
  MDNS_STATIC_CONST_CHAR(TXT_MAC, "mac");
  MDNS_STATIC_CONST_CHAR(TXT_PLATFORM, "platform");
  MDNS_STATIC_CONST_CHAR(TXT_BOARD, "board");
  MDNS_STATIC_CONST_CHAR(TXT_NETWORK, "network");
  MDNS_STATIC_CONST_CHAR(VALUE_BOARD, ESPHOME_BOARD);

  if (api::global_api_server != nullptr) {
    auto &service = this->services_.emplace_next();
    service.service_type = MDNS_STR(SERVICE_ESPHOMELIB);
@@ -112,52 +86,66 @@ void MDNSComponent::compile_records_() {
    txt_records.reserve(txt_count);

    if (!friendly_name_empty) {
      txt_records.push_back({MDNS_STR(TXT_FRIENDLY_NAME), friendly_name});
      txt_records.push_back({MDNS_STR(TXT_FRIENDLY_NAME), MDNS_STR(friendly_name.c_str())});
    }
    txt_records.push_back({MDNS_STR(TXT_VERSION), ESPHOME_VERSION});
    txt_records.push_back({MDNS_STR(TXT_MAC), get_mac_address()});
    txt_records.push_back({MDNS_STR(TXT_VERSION), MDNS_STR(VALUE_VERSION)});
    txt_records.push_back({MDNS_STR(TXT_MAC), MDNS_STR(this->add_dynamic_txt_value(get_mac_address()))});

#ifdef USE_ESP8266
    MDNS_STATIC_CONST_CHAR(PLATFORM_ESP8266, "ESP8266");
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(PLATFORM_ESP8266)});
#elif defined(USE_ESP32)
    MDNS_STATIC_CONST_CHAR(PLATFORM_ESP32, "ESP32");
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(PLATFORM_ESP32)});
#elif defined(USE_RP2040)
    MDNS_STATIC_CONST_CHAR(PLATFORM_RP2040, "RP2040");
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(PLATFORM_RP2040)});
#elif defined(USE_LIBRETINY)
    txt_records.emplace_back(MDNSTXTRecord{"platform", lt_cpu_get_model_name()});
    txt_records.push_back({MDNS_STR(TXT_PLATFORM), MDNS_STR(lt_cpu_get_model_name())});
#endif

    txt_records.push_back({MDNS_STR(TXT_BOARD), ESPHOME_BOARD});
    txt_records.push_back({MDNS_STR(TXT_BOARD), MDNS_STR(VALUE_BOARD)});

#if defined(USE_WIFI)
    MDNS_STATIC_CONST_CHAR(NETWORK_WIFI, "wifi");
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR(NETWORK_WIFI)});
#elif defined(USE_ETHERNET)
    MDNS_STATIC_CONST_CHAR(NETWORK_ETHERNET, "ethernet");
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR(NETWORK_ETHERNET)});
#elif defined(USE_OPENTHREAD)
    MDNS_STATIC_CONST_CHAR(NETWORK_THREAD, "thread");
    txt_records.push_back({MDNS_STR(TXT_NETWORK), MDNS_STR(NETWORK_THREAD)});
#endif

#ifdef USE_API_NOISE
    MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION, "api_encryption");
    MDNS_STATIC_CONST_CHAR(TXT_API_ENCRYPTION_SUPPORTED, "api_encryption_supported");
    MDNS_STATIC_CONST_CHAR(NOISE_ENCRYPTION, "Noise_NNpsk0_25519_ChaChaPoly_SHA256");
    if (api::global_api_server->get_noise_ctx()->has_psk()) {
      txt_records.push_back({MDNS_STR(TXT_API_ENCRYPTION), MDNS_STR(NOISE_ENCRYPTION)});
    } else {
      txt_records.push_back({MDNS_STR(TXT_API_ENCRYPTION_SUPPORTED), MDNS_STR(NOISE_ENCRYPTION)});
    }
    bool has_psk = api::global_api_server->get_noise_ctx()->has_psk();
    const char *encryption_key = has_psk ? TXT_API_ENCRYPTION : TXT_API_ENCRYPTION_SUPPORTED;
    txt_records.push_back({MDNS_STR(encryption_key), MDNS_STR(NOISE_ENCRYPTION)});
#endif

#ifdef ESPHOME_PROJECT_NAME
    txt_records.push_back({MDNS_STR(TXT_PROJECT_NAME), ESPHOME_PROJECT_NAME});
    txt_records.push_back({MDNS_STR(TXT_PROJECT_VERSION), ESPHOME_PROJECT_VERSION});
    MDNS_STATIC_CONST_CHAR(TXT_PROJECT_NAME, "project_name");
    MDNS_STATIC_CONST_CHAR(TXT_PROJECT_VERSION, "project_version");
    MDNS_STATIC_CONST_CHAR(VALUE_PROJECT_NAME, ESPHOME_PROJECT_NAME);
    MDNS_STATIC_CONST_CHAR(VALUE_PROJECT_VERSION, ESPHOME_PROJECT_VERSION);
    txt_records.push_back({MDNS_STR(TXT_PROJECT_NAME), MDNS_STR(VALUE_PROJECT_NAME)});
    txt_records.push_back({MDNS_STR(TXT_PROJECT_VERSION), MDNS_STR(VALUE_PROJECT_VERSION)});
#endif  // ESPHOME_PROJECT_NAME

#ifdef USE_DASHBOARD_IMPORT
    txt_records.push_back({MDNS_STR(TXT_PACKAGE_IMPORT_URL), dashboard_import::get_package_import_url()});
    MDNS_STATIC_CONST_CHAR(TXT_PACKAGE_IMPORT_URL, "package_import_url");
    txt_records.push_back(
        {MDNS_STR(TXT_PACKAGE_IMPORT_URL), MDNS_STR(dashboard_import::get_package_import_url().c_str())});
#endif
  }
#endif  // USE_API

#ifdef USE_PROMETHEUS
  MDNS_STATIC_CONST_CHAR(SERVICE_PROMETHEUS, "_prometheus-http");

  auto &prom_service = this->services_.emplace_next();
  prom_service.service_type = MDNS_STR(SERVICE_PROMETHEUS);
  prom_service.proto = MDNS_STR(SERVICE_TCP);
@@ -165,6 +153,8 @@ void MDNSComponent::compile_records_() {
#endif

#ifdef USE_WEBSERVER
  MDNS_STATIC_CONST_CHAR(SERVICE_HTTP, "_http");

  auto &web_service = this->services_.emplace_next();
  web_service.service_type = MDNS_STR(SERVICE_HTTP);
  web_service.proto = MDNS_STR(SERVICE_TCP);
@@ -172,13 +162,16 @@ void MDNSComponent::compile_records_() {
#endif

#if !defined(USE_API) && !defined(USE_PROMETHEUS) && !defined(USE_WEBSERVER) && !defined(USE_MDNS_EXTRA_SERVICES)
  MDNS_STATIC_CONST_CHAR(SERVICE_HTTP, "_http");
  MDNS_STATIC_CONST_CHAR(TXT_VERSION, "version");

  // Publish "http" service if not using native API or any other services
  // This is just to have *some* mDNS service so that .local resolution works
  auto &fallback_service = this->services_.emplace_next();
  fallback_service.service_type = "_http";
  fallback_service.proto = "_tcp";
  fallback_service.service_type = MDNS_STR(SERVICE_HTTP);
  fallback_service.proto = MDNS_STR(SERVICE_TCP);
  fallback_service.port = USE_WEBSERVER_PORT;
  fallback_service.txt_records.emplace_back(MDNSTXTRecord{"version", ESPHOME_VERSION});
  fallback_service.txt_records.push_back({MDNS_STR(TXT_VERSION), MDNS_STR(VALUE_VERSION)});
#endif
}

@@ -190,11 +183,10 @@ void MDNSComponent::dump_config() {
#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
  ESP_LOGV(TAG, "  Services:");
  for (const auto &service : this->services_) {
    ESP_LOGV(TAG, "  - %s, %s, %d", service.service_type.c_str(), service.proto.c_str(),
    ESP_LOGV(TAG, "  - %s, %s, %d", MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto),
             const_cast<TemplatableValue<uint16_t> &>(service.port).value());
    for (const auto &record : service.txt_records) {
      ESP_LOGV(TAG, "    TXT: %s = %s", record.key.c_str(),
               const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
      ESP_LOGV(TAG, "    TXT: %s = %s", MDNS_STR_ARG(record.key), MDNS_STR_ARG(record.value));
    }
  }
#endif
@@ -9,21 +9,34 @@
namespace esphome {
namespace mdns {

// Helper struct that identifies strings that may be stored in flash storage (similar to LogString)
struct MDNSString;

// Macro to cast string literals to MDNSString* (works on all platforms)
#define MDNS_STR(name) (reinterpret_cast<const esphome::mdns::MDNSString *>(name))

#ifdef USE_ESP8266
#include <pgmspace.h>
#define MDNS_STR_ARG(s) ((PGM_P) (s))
#else
#define MDNS_STR_ARG(s) (reinterpret_cast<const char *>(s))
#endif

// Service count is calculated at compile time by Python codegen
// MDNS_SERVICE_COUNT will always be defined

struct MDNSTXTRecord {
  std::string key;
  TemplatableValue<std::string> value;
  const MDNSString *key;
  const MDNSString *value;
};

struct MDNSService {
  // service name _including_ underscore character prefix
  // as defined in RFC6763 Section 7
  std::string service_type;
  const MDNSString *service_type;
  // second label indicating protocol _including_ underscore character prefix
  // as defined in RFC6763 Section 7, like "_tcp" or "_udp"
  std::string proto;
  const MDNSString *proto;
  TemplatableValue<uint16_t> port;
  std::vector<MDNSTXTRecord> txt_records;
};
@@ -46,6 +59,17 @@ class MDNSComponent : public Component {

  void on_shutdown() override;

  /// Add a dynamic TXT value and return pointer to it for use in MDNSTXTRecord
  const char *add_dynamic_txt_value(const std::string &value) {
    this->dynamic_txt_values_.push_back(value);
    return this->dynamic_txt_values_[this->dynamic_txt_values_.size() - 1].c_str();
  }

  /// Storage for runtime-generated TXT values (MAC address, user lambdas)
  /// Pre-sized at compile time via MDNS_DYNAMIC_TXT_COUNT to avoid heap allocations.
  /// Static/compile-time values (version, board, etc.) are stored directly in flash and don't use this.
  StaticVector<std::string, MDNS_DYNAMIC_TXT_COUNT> dynamic_txt_values_;

 protected:
  StaticVector<MDNSService, MDNS_SERVICE_COUNT> services_{};
  std::string hostname_;
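
`MDNSString` uses the same trick as ESPHome's `LogString`: the struct is declared but never defined, so a `const MDNSString *` can carry either a plain RAM pointer or an ESP8266 PROGMEM address through type-checked struct fields, and `MDNS_STR_ARG` casts it back only at the point of consumption. A condensed sketch of the pattern (names here are illustrative):

```cpp
#include <cstdio>

struct OpaqueString;  // declared, never defined - cannot be dereferenced by accident

#define OPAQUE_STR(s) (reinterpret_cast<const OpaqueString *>(s))
#define OPAQUE_STR_ARG(s) (reinterpret_cast<const char *>(s))

struct Record {
  const OpaqueString *key;  // one pointer instead of a std::string member
};

int main() {
  Record r{OPAQUE_STR("version")};
  std::printf("%s\n", OPAQUE_STR_ARG(r.key));  // cast back only when consumed
  return 0;
}
```
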
@@ -2,7 +2,6 @@
#if defined(USE_ESP32) && defined(USE_MDNS)

#include <mdns.h>
#include <cstring>
#include "esphome/core/hal.h"
#include "esphome/core/log.h"
#include "mdns_component.h"
@@ -26,18 +25,21 @@ void MDNSComponent::setup() {
  mdns_instance_name_set(this->hostname_.c_str());

  for (const auto &service : this->services_) {
    std::vector<mdns_txt_item_t> txt_records(service.txt_records.size());
    for (size_t i = 0; i < service.txt_records.size(); i++) {
      // mdns_service_add copies the strings internally, no need to strdup
      txt_records[i].key = service.txt_records[i].key.c_str();
      txt_records[i].value = const_cast<TemplatableValue<std::string> &>(service.txt_records[i].value).value().c_str();
    std::vector<mdns_txt_item_t> txt_records;
    for (const auto &record : service.txt_records) {
      mdns_txt_item_t it{};
      // key and value are either compile-time string literals in flash or pointers to dynamic_txt_values_
      // Both remain valid for the lifetime of this function, and ESP-IDF makes internal copies
      it.key = MDNS_STR_ARG(record.key);
      it.value = MDNS_STR_ARG(record.value);
      txt_records.push_back(it);
    }
    uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
    err = mdns_service_add(nullptr, service.service_type.c_str(), service.proto.c_str(), port, txt_records.data(),
                           txt_records.size());
    err = mdns_service_add(nullptr, MDNS_STR_ARG(service.service_type), MDNS_STR_ARG(service.proto), port,
                           txt_records.data(), txt_records.size());

    if (err != ESP_OK) {
      ESP_LOGW(TAG, "Failed to register service %s: %s", service.service_type.c_str(), esp_err_to_name(err));
      ESP_LOGW(TAG, "Failed to register service %s: %s", MDNS_STR_ARG(service.service_type), esp_err_to_name(err));
    }
  }
}
@@ -21,19 +21,19 @@ void MDNSComponent::setup() {
    // part of the wire protocol to have an underscore, and for example ESP-IDF
    // expects the underscore to be there, the ESP8266 implementation always adds
    // the underscore itself.
    auto *proto = service.proto.c_str();
    while (*proto == '_') {
    auto *proto = MDNS_STR_ARG(service.proto);
    while (progmem_read_byte((const uint8_t *) proto) == '_') {
      proto++;
    }
    auto *service_type = service.service_type.c_str();
    while (*service_type == '_') {
    auto *service_type = MDNS_STR_ARG(service.service_type);
    while (progmem_read_byte((const uint8_t *) service_type) == '_') {
      service_type++;
    }
    uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
    MDNS.addService(service_type, proto, port);
    MDNS.addService(FPSTR(service_type), FPSTR(proto), port);
    for (const auto &record : service.txt_records) {
      MDNS.addServiceTxt(service_type, proto, record.key.c_str(),
                         const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
      MDNS.addServiceTxt(FPSTR(service_type), FPSTR(proto), FPSTR(MDNS_STR_ARG(record.key)),
                         FPSTR(MDNS_STR_ARG(record.value)));
    }
  }
}
@@ -21,19 +21,18 @@ void MDNSComponent::setup() {
    // part of the wire protocol to have an underscore, and for example ESP-IDF
    // expects the underscore to be there, the ESP8266 implementation always adds
    // the underscore itself.
    auto *proto = service.proto.c_str();
    auto *proto = MDNS_STR_ARG(service.proto);
    while (*proto == '_') {
      proto++;
    }
    auto *service_type = service.service_type.c_str();
    auto *service_type = MDNS_STR_ARG(service.service_type);
    while (*service_type == '_') {
      service_type++;
    }
    uint16_t port_ = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
    MDNS.addService(service_type, proto, port_);
    for (const auto &record : service.txt_records) {
      MDNS.addServiceTxt(service_type, proto, record.key.c_str(),
                         const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
      MDNS.addServiceTxt(service_type, proto, MDNS_STR_ARG(record.key), MDNS_STR_ARG(record.value));
    }
  }
}
@@ -21,19 +21,18 @@ void MDNSComponent::setup() {
    // part of the wire protocol to have an underscore, and for example ESP-IDF
    // expects the underscore to be there, the ESP8266 implementation always adds
    // the underscore itself.
    auto *proto = service.proto.c_str();
    auto *proto = MDNS_STR_ARG(service.proto);
    while (*proto == '_') {
      proto++;
    }
    auto *service_type = service.service_type.c_str();
    auto *service_type = MDNS_STR_ARG(service.service_type);
    while (*service_type == '_') {
      service_type++;
    }
    uint16_t port = const_cast<TemplatableValue<uint16_t> &>(service.port).value();
    MDNS.addService(service_type, proto, port);
    for (const auto &record : service.txt_records) {
      MDNS.addServiceTxt(service_type, proto, record.key.c_str(),
                         const_cast<TemplatableValue<std::string> &>(record.value).value().c_str());
      MDNS.addServiceTxt(service_type, proto, MDNS_STR_ARG(record.key), MDNS_STR_ARG(record.value));
    }
  }
}
@@ -66,7 +66,10 @@ bool Modbus::parse_modbus_byte_(uint8_t byte) {
    uint8_t data_offset = 3;

    // Per https://modbus.org/docs/Modbus_Application_Protocol_V1_1b3.pdf Ch 5 User-Defined function codes
    if (((function_code >= 65) && (function_code <= 72)) || ((function_code >= 100) && (function_code <= 110))) {
    if (((function_code >= FUNCTION_CODE_USER_DEFINED_SPACE_1_INIT) &&
         (function_code <= FUNCTION_CODE_USER_DEFINED_SPACE_1_END)) ||
        ((function_code >= FUNCTION_CODE_USER_DEFINED_SPACE_2_INIT) &&
         (function_code <= FUNCTION_CODE_USER_DEFINED_SPACE_2_END))) {
      // Handle user-defined function, since we don't know how big this ought to be,
      // ideally we should delegate the entire length detection to whatever handler is
      // installed, but wait, there is the CRC, and if we get a hit there is a good
@@ -91,10 +94,14 @@ bool Modbus::parse_modbus_byte_(uint8_t byte) {
    } else {
      // data starts at 2 and length is 4 for read registers commands
      if (this->role == ModbusRole::SERVER) {
        if (function_code == 0x1 || function_code == 0x3 || function_code == 0x4 || function_code == 0x6) {
        if (function_code == ModbusFunctionCode::READ_COILS ||
            function_code == ModbusFunctionCode::READ_DISCRETE_INPUTS ||
            function_code == ModbusFunctionCode::READ_HOLDING_REGISTERS ||
            function_code == ModbusFunctionCode::READ_INPUT_REGISTERS ||
            function_code == ModbusFunctionCode::WRITE_SINGLE_REGISTER) {
          data_offset = 2;
          data_len = 4;
        } else if (function_code == 0x10) {
        } else if (function_code == ModbusFunctionCode::WRITE_MULTIPLE_REGISTERS) {
          if (at < 6) {
            return true;
          }
@@ -104,7 +111,10 @@ bool Modbus::parse_modbus_byte_(uint8_t byte) {
        }
      } else {
        // the response for write command mirrors the requests and data starts at offset 2 instead of 3 for read commands
        if (function_code == 0x5 || function_code == 0x06 || function_code == 0xF || function_code == 0x10) {
        if (function_code == ModbusFunctionCode::WRITE_SINGLE_COIL ||
            function_code == ModbusFunctionCode::WRITE_SINGLE_REGISTER ||
            function_code == ModbusFunctionCode::WRITE_MULTIPLE_COILS ||
            function_code == ModbusFunctionCode::WRITE_MULTIPLE_REGISTERS) {
          data_offset = 2;
          data_len = 4;
        }
@@ -112,7 +122,7 @@ bool Modbus::parse_modbus_byte_(uint8_t byte) {

      // Error ( msb indicates error )
      // response format:  Byte[0] = device address, Byte[1] function code | 0x80 , Byte[2] exception code, Byte[3-4] crc
      if ((function_code & 0x80) == 0x80) {
      if ((function_code & FUNCTION_CODE_EXCEPTION_MASK) == FUNCTION_CODE_EXCEPTION_MASK) {
        data_offset = 2;
        data_len = 1;
      }
@@ -143,10 +153,10 @@ bool Modbus::parse_modbus_byte_(uint8_t byte) {
    if (device->address_ == address) {
      found = true;
      // Is it an error response?
      if ((function_code & 0x80) == 0x80) {
      if ((function_code & FUNCTION_CODE_EXCEPTION_MASK) == FUNCTION_CODE_EXCEPTION_MASK) {
        ESP_LOGD(TAG, "Modbus error function code: 0x%X exception: %d", function_code, raw[2]);
        if (waiting_for_response != 0) {
          device->on_modbus_error(function_code & 0x7F, raw[2]);
          device->on_modbus_error(function_code & FUNCTION_CODE_MASK, raw[2]);
        } else {
          // Ignore modbus exception not related to a pending command
          ESP_LOGD(TAG, "Ignoring Modbus error - not expecting a response");
@@ -154,12 +164,14 @@ bool Modbus::parse_modbus_byte_(uint8_t byte) {
        continue;
      }
      if (this->role == ModbusRole::SERVER) {
        if (function_code == 0x3 || function_code == 0x4) {
        if (function_code == ModbusFunctionCode::READ_HOLDING_REGISTERS ||
            function_code == ModbusFunctionCode::READ_INPUT_REGISTERS) {
          device->on_modbus_read_registers(function_code, uint16_t(data[1]) | (uint16_t(data[0]) << 8),
                                           uint16_t(data[3]) | (uint16_t(data[2]) << 8));
          continue;
        }
        if (function_code == 0x6 || function_code == 0x10) {
        if (function_code == ModbusFunctionCode::WRITE_SINGLE_REGISTER ||
            function_code == ModbusFunctionCode::WRITE_MULTIPLE_REGISTERS) {
          device->on_modbus_write_registers(function_code, data);
          continue;
        }
@@ -199,7 +211,7 @@ void Modbus::send(uint8_t address, uint8_t function_code, uint16_t start_address

  // Only check max number of registers for standard function codes
  // Some devices use non standard codes like 0x43
  if (number_of_entities > MAX_VALUES && function_code <= 0x10) {
  if (number_of_entities > MAX_VALUES && function_code <= ModbusFunctionCode::WRITE_MULTIPLE_REGISTERS) {
    ESP_LOGE(TAG, "send too many values %d max=%zu", number_of_entities, MAX_VALUES);
    return;
  }
@@ -210,15 +222,17 @@ void Modbus::send(uint8_t address, uint8_t function_code, uint16_t start_address
  if (this->role == ModbusRole::CLIENT) {
    data.push_back(start_address >> 8);
    data.push_back(start_address >> 0);
    if (function_code != 0x5 && function_code != 0x6) {
    if (function_code != ModbusFunctionCode::WRITE_SINGLE_COIL &&
        function_code != ModbusFunctionCode::WRITE_SINGLE_REGISTER) {
      data.push_back(number_of_entities >> 8);
      data.push_back(number_of_entities >> 0);
    }
  }

  if (payload != nullptr) {
    if (this->role == ModbusRole::SERVER || function_code == 0xF || function_code == 0x10) {  // Write multiple
      data.push_back(payload_len);  // Byte count is required for write
    if (this->role == ModbusRole::SERVER || function_code == ModbusFunctionCode::WRITE_MULTIPLE_COILS ||
        function_code == ModbusFunctionCode::WRITE_MULTIPLE_REGISTERS) {  // Write multiple
      data.push_back(payload_len);  // Byte count is required for write
    } else {
      payload_len = 2;  // Write single register or coil
    }
@@ -3,6 +3,8 @@
#include "esphome/core/component.h"
#include "esphome/components/uart/uart.h"

#include "esphome/components/modbus/modbus_definitions.h"

#include <vector>

namespace esphome {
@@ -65,12 +67,12 @@ class ModbusDevice {
    this->parent_->send(this->address_, function, start_address, number_of_entities, payload_len, payload);
  }
  void send_raw(const std::vector<uint8_t> &payload) { this->parent_->send_raw(payload); }
  void send_error(uint8_t function_code, uint8_t exception_code) {
  void send_error(uint8_t function_code, ModbusExceptionCode exception_code) {
    std::vector<uint8_t> error_response;
    error_response.reserve(3);
    error_response.push_back(this->address_);
    error_response.push_back(function_code | 0x80);
    error_response.push_back(exception_code);
    error_response.push_back(function_code | FUNCTION_CODE_EXCEPTION_MASK);
    error_response.push_back(static_cast<uint8_t>(exception_code));
    this->send_raw(error_response);
  }
  // If more than one device is connected block sending a new command before a response is received
esphome/components/modbus/modbus_definitions.h (new file, 86 lines)
@@ -0,0 +1,86 @@
#pragma once

#include "esphome/core/component.h"

namespace esphome {
namespace modbus {

/// Modbus definitions from specs:
/// https://modbus.org/docs/Modbus_Application_Protocol_V1_1b3.pdf
// 5 Function Code Categories
const uint8_t FUNCTION_CODE_USER_DEFINED_SPACE_1_INIT = 65;  // 0x41
const uint8_t FUNCTION_CODE_USER_DEFINED_SPACE_1_END = 72;   // 0x48

const uint8_t FUNCTION_CODE_USER_DEFINED_SPACE_2_INIT = 100;  // 0x64
const uint8_t FUNCTION_CODE_USER_DEFINED_SPACE_2_END = 110;   // 0x6E

enum class ModbusFunctionCode : uint8_t {
  CUSTOM = 0x00,
  READ_COILS = 0x01,
  READ_DISCRETE_INPUTS = 0x02,
  READ_HOLDING_REGISTERS = 0x03,
  READ_INPUT_REGISTERS = 0x04,
  WRITE_SINGLE_COIL = 0x05,
  WRITE_SINGLE_REGISTER = 0x06,
  READ_EXCEPTION_STATUS = 0x07,   // not implemented
  DIAGNOSTICS = 0x08,             // not implemented
  GET_COMM_EVENT_COUNTER = 0x0B,  // not implemented
  GET_COMM_EVENT_LOG = 0x0C,      // not implemented
  WRITE_MULTIPLE_COILS = 0x0F,
  WRITE_MULTIPLE_REGISTERS = 0x10,
  REPORT_SERVER_ID = 0x11,               // not implemented
  READ_FILE_RECORD = 0x14,               // not implemented
  WRITE_FILE_RECORD = 0x15,              // not implemented
  MASK_WRITE_REGISTER = 0x16,            // not implemented
  READ_WRITE_MULTIPLE_REGISTERS = 0x17,  // not implemented
  READ_FIFO_QUEUE = 0x18,                // not implemented
};

/* Allow comparison operators between ModbusFunctionCode and uint8_t */
inline bool operator==(ModbusFunctionCode lhs, uint8_t rhs) { return static_cast<uint8_t>(lhs) == rhs; }
inline bool operator==(uint8_t lhs, ModbusFunctionCode rhs) { return lhs == static_cast<uint8_t>(rhs); }
inline bool operator!=(ModbusFunctionCode lhs, uint8_t rhs) { return !(static_cast<uint8_t>(lhs) == rhs); }
inline bool operator!=(uint8_t lhs, ModbusFunctionCode rhs) { return !(lhs == static_cast<uint8_t>(rhs)); }
inline bool operator<(ModbusFunctionCode lhs, uint8_t rhs) { return static_cast<uint8_t>(lhs) < rhs; }
inline bool operator<(uint8_t lhs, ModbusFunctionCode rhs) { return lhs < static_cast<uint8_t>(rhs); }
inline bool operator<=(ModbusFunctionCode lhs, uint8_t rhs) { return static_cast<uint8_t>(lhs) <= rhs; }
inline bool operator<=(uint8_t lhs, ModbusFunctionCode rhs) { return lhs <= static_cast<uint8_t>(rhs); }
inline bool operator>(ModbusFunctionCode lhs, uint8_t rhs) { return static_cast<uint8_t>(lhs) > rhs; }
inline bool operator>(uint8_t lhs, ModbusFunctionCode rhs) { return lhs > static_cast<uint8_t>(rhs); }
inline bool operator>=(ModbusFunctionCode lhs, uint8_t rhs) { return static_cast<uint8_t>(lhs) >= rhs; }
inline bool operator>=(uint8_t lhs, ModbusFunctionCode rhs) { return lhs >= static_cast<uint8_t>(rhs); }

// 4.3 MODBUS Data model
enum class ModbusRegisterType : uint8_t {
  CUSTOM = 0x00,
  COIL = 0x01,
  DISCRETE_INPUT = 0x02,
  HOLDING = 0x03,
  READ = 0x04,
};

// 7 MODBUS Exception Responses:
const uint8_t FUNCTION_CODE_MASK = 0x7F;
const uint8_t FUNCTION_CODE_EXCEPTION_MASK = 0x80;

enum class ModbusExceptionCode : uint8_t {
  ILLEGAL_FUNCTION = 0x01,
  ILLEGAL_DATA_ADDRESS = 0x02,
  ILLEGAL_DATA_VALUE = 0x03,
  SERVICE_DEVICE_FAILURE = 0x04,
  ACKNOWLEDGE = 0x05,
  SERVER_DEVICE_BUSY = 0x06,
  MEMORY_PARITY_ERROR = 0x08,
  GATEWAY_PATH_UNAVAILABLE = 0x0A,
  GATEWAY_TARGET_DEVICE_FAILED_TO_RESPOND = 0x0B,
};

// 6.12 16 (0x10) Write Multiple registers:
const uint8_t MAX_NUM_OF_REGISTERS_TO_WRITE = 123;  // 0x7B

// 6.3 03 (0x03) Read Holding Registers
// 6.4 04 (0x04) Read Input Registers
const uint8_t MAX_NUM_OF_REGISTERS_TO_READ = 125;  // 0x7D
/// End of Modbus definitions
}  // namespace modbus
}  // namespace esphome
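
With the heterogeneous operators above, framing code can compare the raw wire byte against the enum directly, with no `static_cast` at every call site; e.g.:

```cpp
#include <cstdint>
// Assumes the modbus_definitions.h shown above is on the include path.
#include "esphome/components/modbus/modbus_definitions.h"

bool is_read_request(uint8_t function_code) {
  using esphome::modbus::ModbusFunctionCode;
  // operator==(uint8_t, ModbusFunctionCode) is found via ADL in esphome::modbus
  return function_code == ModbusFunctionCode::READ_HOLDING_REGISTERS ||
         function_code == ModbusFunctionCode::READ_INPUT_REGISTERS;
}
```
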
@@ -20,6 +20,7 @@ from .const import (
    CONF_BYTE_OFFSET,
    CONF_COMMAND_THROTTLE,
    CONF_CUSTOM_COMMAND,
    CONF_ENABLED,
    CONF_FORCE_NEW_RANGE,
    CONF_MAX_CMD_RETRIES,
    CONF_MODBUS_CONTROLLER_ID,
@@ -28,8 +29,11 @@ from .const import (
    CONF_ON_OFFLINE,
    CONF_ON_ONLINE,
    CONF_REGISTER_COUNT,
    CONF_REGISTER_LAST_ADDRESS,
    CONF_REGISTER_TYPE,
    CONF_REGISTER_VALUE,
    CONF_RESPONSE_SIZE,
    CONF_SERVER_COURTESY_RESPONSE,
    CONF_SKIP_UPDATES,
    CONF_VALUE_TYPE,
)
@@ -49,6 +53,7 @@ ModbusController = modbus_controller_ns.class_(
)

SensorItem = modbus_controller_ns.struct("SensorItem")
ServerCourtesyResponse = modbus_controller_ns.struct("ServerCourtesyResponse")
ServerRegister = modbus_controller_ns.struct("ServerRegister")

ModbusFunctionCode_ns = modbus_controller_ns.namespace("ModbusFunctionCode")
@@ -143,6 +148,14 @@ ModbusOfflineTrigger = modbus_controller_ns.class_(

_LOGGER = logging.getLogger(__name__)

SERVER_COURTESY_RESPONSE_SCHEMA = cv.Schema(
    {
        cv.Optional(CONF_ENABLED, default=False): cv.boolean,
        cv.Optional(CONF_REGISTER_LAST_ADDRESS, default=0xFFFF): cv.hex_uint16_t,
        cv.Optional(CONF_REGISTER_VALUE, default=0): cv.hex_uint16_t,
    }
)

ModbusServerRegisterSchema = cv.Schema(
    {
        cv.GenerateID(): cv.declare_id(ServerRegister),
@@ -162,6 +175,7 @@ CONFIG_SCHEMA = cv.All(
            cv.Optional(
                CONF_COMMAND_THROTTLE, default="0ms"
            ): cv.positive_time_period_milliseconds,
            cv.Optional(CONF_SERVER_COURTESY_RESPONSE): SERVER_COURTESY_RESPONSE_SCHEMA,
            cv.Optional(CONF_MAX_CMD_RETRIES, default=4): cv.positive_int,
            cv.Optional(CONF_OFFLINE_SKIP_UPDATES, default=0): cv.positive_int,
            cv.Optional(
@@ -232,7 +246,7 @@ def validate_modbus_register(config):


def _final_validate(config):
    if CONF_SERVER_REGISTERS in config:
    if CONF_SERVER_COURTESY_RESPONSE in config or CONF_SERVER_REGISTERS in config:
        return modbus.final_validate_modbus_device("modbus_controller", role="server")(
            config
        )
@@ -299,6 +313,20 @@ async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    cg.add(var.set_allow_duplicate_commands(config[CONF_ALLOW_DUPLICATE_COMMANDS]))
    cg.add(var.set_command_throttle(config[CONF_COMMAND_THROTTLE]))
    if server_courtesy_response := config.get(CONF_SERVER_COURTESY_RESPONSE):
        cg.add(
            var.set_server_courtesy_response(
                cg.StructInitializer(
                    ServerCourtesyResponse,
                    ("enabled", server_courtesy_response[CONF_ENABLED]),
                    (
                        "register_last_address",
                        server_courtesy_response[CONF_REGISTER_LAST_ADDRESS],
                    ),
                    ("register_value", server_courtesy_response[CONF_REGISTER_VALUE]),
                )
            )
        )
    cg.add(var.set_max_cmd_retries(config[CONF_MAX_CMD_RETRIES]))
    cg.add(var.set_offline_skip_updates(config[CONF_OFFLINE_SKIP_UPDATES]))
    if CONF_SERVER_REGISTERS in config:
@@ -2,6 +2,7 @@ CONF_ALLOW_DUPLICATE_COMMANDS = "allow_duplicate_commands"
|
||||
CONF_BITMASK = "bitmask"
|
||||
CONF_BYTE_OFFSET = "byte_offset"
|
||||
CONF_COMMAND_THROTTLE = "command_throttle"
|
||||
CONF_ENABLED = "enabled"
|
||||
CONF_OFFLINE_SKIP_UPDATES = "offline_skip_updates"
|
||||
CONF_CUSTOM_COMMAND = "custom_command"
|
||||
CONF_FORCE_NEW_RANGE = "force_new_range"
|
||||
@@ -13,8 +14,11 @@ CONF_ON_ONLINE = "on_online"
|
||||
CONF_ON_OFFLINE = "on_offline"
|
||||
CONF_RAW_ENCODE = "raw_encode"
|
||||
CONF_REGISTER_COUNT = "register_count"
|
||||
CONF_REGISTER_LAST_ADDRESS = "register_last_address"
|
||||
CONF_REGISTER_TYPE = "register_type"
|
||||
CONF_REGISTER_VALUE = "register_value"
|
||||
CONF_RESPONSE_SIZE = "response_size"
|
||||
CONF_SERVER_COURTESY_RESPONSE = "server_courtesy_response"
|
||||
CONF_SKIP_UPDATES = "skip_updates"
|
||||
CONF_USE_WRITE_MULTIPLE = "use_write_multiple"
|
||||
CONF_VALUE_TYPE = "value_type"
|
||||
|
@@ -112,6 +112,12 @@ void ModbusController::on_modbus_read_registers(uint8_t function_code, uint16_t
           "0x%X.",
           this->address_, function_code, start_address, number_of_registers);

  if (number_of_registers == 0 || number_of_registers > modbus::MAX_NUM_OF_REGISTERS_TO_READ) {
    ESP_LOGW(TAG, "Invalid number of registers %d. Sending exception response.", number_of_registers);
    this->send_error(function_code, ModbusExceptionCode::ILLEGAL_DATA_ADDRESS);
    return;
  }

  std::vector<uint16_t> sixteen_bit_response;
  for (uint16_t current_address = start_address; current_address < start_address + number_of_registers;) {
    bool found = false;
@@ -136,9 +142,21 @@ void ModbusController::on_modbus_read_registers(uint8_t function_code, uint16_t
    }

    if (!found) {
      ESP_LOGW(TAG, "Could not match any register to address %02X. Sending exception response.", current_address);
      send_error(function_code, 0x02);
      return;
      if (this->server_courtesy_response_.enabled &&
          (current_address <= this->server_courtesy_response_.register_last_address)) {
        ESP_LOGD(TAG,
                 "Could not match any register to address 0x%02X, but default allowed. "
                 "Returning default value: %d.",
                 current_address, this->server_courtesy_response_.register_value);
        sixteen_bit_response.push_back(this->server_courtesy_response_.register_value);
        current_address += 1;  // Just increment by 1, as the default response is a single register
      } else {
        ESP_LOGW(TAG,
                 "Could not match any register to address 0x%02X and default not allowed. Sending exception response.",
                 current_address);
        this->send_error(function_code, ModbusExceptionCode::ILLEGAL_DATA_ADDRESS);
        return;
      }
    }
  }

@@ -156,27 +174,27 @@ void ModbusController::on_modbus_write_registers(uint8_t function_code, const st
  uint16_t number_of_registers;
  uint16_t payload_offset;

  if (function_code == 0x10) {
  if (function_code == ModbusFunctionCode::WRITE_MULTIPLE_REGISTERS) {
    number_of_registers = uint16_t(data[3]) | (uint16_t(data[2]) << 8);
    if (number_of_registers == 0 || number_of_registers > 0x7B) {
    if (number_of_registers == 0 || number_of_registers > modbus::MAX_NUM_OF_REGISTERS_TO_WRITE) {
      ESP_LOGW(TAG, "Invalid number of registers %d. Sending exception response.", number_of_registers);
      send_error(function_code, 3);
      this->send_error(function_code, ModbusExceptionCode::ILLEGAL_DATA_VALUE);
      return;
    }
    uint16_t payload_size = data[4];
    if (payload_size != number_of_registers * 2) {
      ESP_LOGW(TAG, "Payload size of %d bytes is not 2 times the number of registers (%d). Sending exception response.",
               payload_size, number_of_registers);
      send_error(function_code, 3);
      this->send_error(function_code, ModbusExceptionCode::ILLEGAL_DATA_VALUE);
      return;
    }
    payload_offset = 5;
  } else if (function_code == 0x06) {
  } else if (function_code == ModbusFunctionCode::WRITE_SINGLE_REGISTER) {
    number_of_registers = 1;
    payload_offset = 2;
  } else {
    ESP_LOGW(TAG, "Invalid function code 0x%X. Sending exception response.", function_code);
    send_error(function_code, 1);
    this->send_error(function_code, ModbusExceptionCode::ILLEGAL_FUNCTION);
    return;
  }

@@ -211,7 +229,7 @@ void ModbusController::on_modbus_write_registers(uint8_t function_code, const st
  if (!for_each_register([](ServerRegister *server_register, uint16_t offset) -> bool {
        return server_register->write_lambda != nullptr;
      })) {
    send_error(function_code, 1);
    this->send_error(function_code, ModbusExceptionCode::ILLEGAL_FUNCTION);
    return;
  }

@@ -220,7 +238,7 @@ void ModbusController::on_modbus_write_registers(uint8_t function_code, const st
        int64_t number = payload_to_number(data, server_register->value_type, offset, 0xFFFFFFFF);
        return server_register->write_lambda(number);
      })) {
    send_error(function_code, 4);
    this->send_error(function_code, ModbusExceptionCode::SERVICE_DEVICE_FAILURE);
    return;
  }

@@ -431,8 +449,15 @@ void ModbusController::dump_config() {
                "ModbusController:\n"
                "  Address: 0x%02X\n"
                "  Max Command Retries: %d\n"
                "  Offline Skip Updates: %d",
                this->address_, this->max_cmd_retries_, this->offline_skip_updates_);
                "  Offline Skip Updates: %d\n"
                "  Server Courtesy Response:\n"
                "    Enabled: %s\n"
                "    Register Last Address: 0x%02X\n"
                "    Register Value: %d",
                this->address_, this->max_cmd_retries_, this->offline_skip_updates_,
                this->server_courtesy_response_.enabled ? "true" : "false",
                this->server_courtesy_response_.register_last_address, this->server_courtesy_response_.register_value);

#if ESPHOME_LOG_LEVEL >= ESPHOME_LOG_LEVEL_VERBOSE
  ESP_LOGCONFIG(TAG, "sensormap");
  for (auto &it : this->sensorset_) {
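
The courtesy-response branch changes the server's read semantics: an unmapped address at or below `register_last_address` is padded with the configured filler value instead of aborting the whole response with `ILLEGAL_DATA_ADDRESS`. The decision, distilled into a standalone helper (a sketch, not the component's API):

```cpp
#include <cstdint>
#include <optional>

struct CourtesyResponse {
  bool enabled{false};
  uint16_t register_last_address{0xFFFF};
  uint16_t register_value{0};
};

// Returns the filler value for an unmapped address, or nullopt when the
// request should be rejected with an ILLEGAL_DATA_ADDRESS exception.
std::optional<uint16_t> courtesy_value(const CourtesyResponse &cr, uint16_t address) {
  if (cr.enabled && address <= cr.register_last_address)
    return cr.register_value;
  return std::nullopt;
}
```
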
@@ -16,35 +16,9 @@ namespace modbus_controller {

class ModbusController;

enum class ModbusFunctionCode {
  CUSTOM = 0x00,
  READ_COILS = 0x01,
  READ_DISCRETE_INPUTS = 0x02,
  READ_HOLDING_REGISTERS = 0x03,
  READ_INPUT_REGISTERS = 0x04,
  WRITE_SINGLE_COIL = 0x05,
  WRITE_SINGLE_REGISTER = 0x06,
  READ_EXCEPTION_STATUS = 0x07,   // not implemented
  DIAGNOSTICS = 0x08,             // not implemented
  GET_COMM_EVENT_COUNTER = 0x0B,  // not implemented
  GET_COMM_EVENT_LOG = 0x0C,      // not implemented
  WRITE_MULTIPLE_COILS = 0x0F,
  WRITE_MULTIPLE_REGISTERS = 0x10,
  REPORT_SERVER_ID = 0x11,               // not implemented
  READ_FILE_RECORD = 0x14,               // not implemented
  WRITE_FILE_RECORD = 0x15,              // not implemented
  MASK_WRITE_REGISTER = 0x16,            // not implemented
  READ_WRITE_MULTIPLE_REGISTERS = 0x17,  // not implemented
  READ_FIFO_QUEUE = 0x18,                // not implemented
};

enum class ModbusRegisterType : uint8_t {
  CUSTOM = 0x0,
  COIL = 0x01,
  DISCRETE_INPUT = 0x02,
  HOLDING = 0x03,
  READ = 0x04,
};
using modbus::ModbusFunctionCode;
using modbus::ModbusRegisterType;
using modbus::ModbusExceptionCode;

enum class SensorValueType : uint8_t {
  RAW = 0x00,  // variable length
@@ -256,6 +230,12 @@ class SensorItem {
  bool force_new_range{false};
};

struct ServerCourtesyResponse {
  bool enabled{false};
  uint16_t register_last_address{0xFFFF};
  uint16_t register_value{0};
};

class ServerRegister {
  using ReadLambda = std::function<int64_t()>;
  using WriteLambda = std::function<bool(int64_t value)>;
@@ -530,6 +510,12 @@ class ModbusController : public PollingComponent, public modbus::ModbusDevice {
  void set_max_cmd_retries(uint8_t max_cmd_retries) { this->max_cmd_retries_ = max_cmd_retries; }
  /// get how many times a command will be (re)sent if no response is received
  uint8_t get_max_cmd_retries() { return this->max_cmd_retries_; }
  /// Called by esphome generated code to set the server courtesy response object
  void set_server_courtesy_response(const ServerCourtesyResponse &server_courtesy_response) {
    this->server_courtesy_response_ = server_courtesy_response;
  }
  /// Get the server courtesy response object
  ServerCourtesyResponse get_server_courtesy_response() const { return this->server_courtesy_response_; }

 protected:
  /// parse sensormap_ and create range of sequential addresses
@@ -572,6 +558,9 @@ class ModbusController : public PollingComponent, public modbus::ModbusDevice {
  CallbackManager<void(int, int)> online_callback_{};
  /// Server offline callback
  CallbackManager<void(int, int)> offline_callback_{};
  /// Server courtesy response
  ServerCourtesyResponse server_courtesy_response_{
      .enabled = false, .register_last_address = 0xFFFF, .register_value = 0};
};

/** Convert vector<uint8_t> response payload to float.
@@ -7,7 +7,7 @@

#include "opentherm.h"
#include "esphome/core/helpers.h"
#if defined(ESP32) || defined(USE_ESP_IDF)
#ifdef USE_ESP32
#include "driver/timer.h"
#include "esp_err.h"
#endif
@@ -31,7 +31,7 @@ OpenTherm *OpenTherm::instance = nullptr;
OpenTherm::OpenTherm(InternalGPIOPin *in_pin, InternalGPIOPin *out_pin, int32_t device_timeout)
    : in_pin_(in_pin),
      out_pin_(out_pin),
#if defined(ESP32) || defined(USE_ESP_IDF)
#ifdef USE_ESP32
      timer_group_(TIMER_GROUP_0),
      timer_idx_(TIMER_0),
#endif
@@ -57,7 +57,7 @@ bool OpenTherm::initialize() {
  this->out_pin_->setup();
  this->out_pin_->digital_write(true);

#if defined(ESP32) || defined(USE_ESP_IDF)
#ifdef USE_ESP32
  return this->init_esp32_timer_();
#else
  return true;
@@ -238,7 +238,7 @@ void IRAM_ATTR OpenTherm::write_bit_(uint8_t high, uint8_t clock) {
  }
}

#if defined(ESP32) || defined(USE_ESP_IDF)
#ifdef USE_ESP32

bool OpenTherm::init_esp32_timer_() {
  // Search for a free timer. Maybe unstable, we'll see.
@@ -365,7 +365,7 @@ void IRAM_ATTR OpenTherm::stop_timer_() {
  }
}

#endif // END ESP32
#endif // USE_ESP32

#ifdef ESP8266
// 5 kHz timer_
@@ -12,7 +12,7 @@
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

#if defined(ESP32) || defined(USE_ESP_IDF)
#ifdef USE_ESP32
#include "driver/timer.h"
#endif

@@ -356,7 +356,7 @@ class OpenTherm {
  ISRInternalGPIOPin isr_in_pin_;
  ISRInternalGPIOPin isr_out_pin_;

#if defined(ESP32) || defined(USE_ESP_IDF)
#ifdef USE_ESP32
  timer_group_t timer_group_;
  timer_idx_t timer_idx_;
#endif
@@ -370,7 +370,7 @@ class OpenTherm {
  int32_t timeout_counter_; // <0 no timeout
  int32_t device_timeout_;

#if defined(ESP32) || defined(USE_ESP_IDF)
#ifdef USE_ESP32
  esp_err_t timer_error_ = ESP_OK;
  TimerErrorType timer_error_type_ = TimerErrorType::NO_TIMER_ERROR;
@@ -155,7 +155,7 @@ void OpenThreadSrpComponent::setup() {

    // Set service name
    char *string = otSrpClientBuffersGetServiceEntryServiceNameString(entry, &size);
    std::string full_service = service.service_type + "." + service.proto;
    std::string full_service = std::string(MDNS_STR_ARG(service.service_type)) + "." + MDNS_STR_ARG(service.proto);
    if (full_service.size() > size) {
      ESP_LOGW(TAG, "Service name too long: %s", full_service.c_str());
      continue;
@@ -180,10 +180,12 @@ void OpenThreadSrpComponent::setup() {
    entry->mService.mNumTxtEntries = service.txt_records.size();
    for (size_t i = 0; i < service.txt_records.size(); i++) {
      const auto &txt = service.txt_records[i];
      auto value = const_cast<TemplatableValue<std::string> &>(txt.value).value();
      txt_entries[i].mKey = strdup(txt.key.c_str());
      txt_entries[i].mValue = reinterpret_cast<const uint8_t *>(strdup(value.c_str()));
      txt_entries[i].mValueLength = value.size();
      // Value is either a compile-time string literal in flash or a pointer to dynamic_txt_values_
      // OpenThread SRP client expects the data to persist, so we strdup it
      const char *value_str = MDNS_STR_ARG(txt.value);
      txt_entries[i].mKey = MDNS_STR_ARG(txt.key);
      txt_entries[i].mValue = reinterpret_cast<const uint8_t *>(strdup(value_str));
      txt_entries[i].mValueLength = strlen(value_str);
    }
    entry->mService.mTxtEntries = txt_entries;
    entry->mService.mNumTxtEntries = service.txt_records.size();
esphome/components/split_buffer/__init__.py (new file, 5 lines)
@@ -0,0 +1,5 @@
CODEOWNERS = ["@jesserockz"]

# Allows split_buffer to be configured in yaml, to allow use of the C++ api.

CONFIG_SCHEMA = {}
esphome/components/split_buffer/split_buffer.cpp (new file, 133 lines)
@@ -0,0 +1,133 @@
#include "split_buffer.h"

#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

namespace esphome::split_buffer {

static constexpr const char *const TAG = "split_buffer";

SplitBuffer::~SplitBuffer() { this->free(); }

bool SplitBuffer::init(size_t total_length) {
  this->free();  // Clean up any existing allocation

  if (total_length == 0) {
    return false;
  }

  this->total_length_ = total_length;
  size_t current_buffer_size = total_length;

  RAMAllocator<uint8_t *> ptr_allocator;
  RAMAllocator<uint8_t> allocator;

  // Try to allocate the entire buffer first
  while (current_buffer_size > 0) {
    // Calculate how many buffers we need of this size
    size_t needed_buffers = (total_length + current_buffer_size - 1) / current_buffer_size;

    // Try to allocate array of buffer pointers
    uint8_t **temp_buffers = ptr_allocator.allocate(needed_buffers);
    if (temp_buffers == nullptr) {
      // If we can't even allocate the pointer array, don't need to continue
      ESP_LOGE(TAG, "Failed to allocate pointers");
      return false;
    }

    // Initialize all pointers to null
    for (size_t i = 0; i < needed_buffers; i++) {
      temp_buffers[i] = nullptr;
    }

    // Try to allocate all the buffers
    bool allocation_success = true;
    for (size_t i = 0; i < needed_buffers; i++) {
      size_t this_buffer_size = current_buffer_size;
      // Last buffer might be smaller if total_length is not divisible by current_buffer_size
      if (i == needed_buffers - 1 && total_length % current_buffer_size != 0) {
        this_buffer_size = total_length % current_buffer_size;
      }

      temp_buffers[i] = allocator.allocate(this_buffer_size);
      if (temp_buffers[i] == nullptr) {
        allocation_success = false;
        break;
      }

      // Initialize buffer to zero
      memset(temp_buffers[i], 0, this_buffer_size);
    }

    if (allocation_success) {
      // Success! Store the result
      this->buffers_ = temp_buffers;
      this->buffer_count_ = needed_buffers;
      this->buffer_size_ = current_buffer_size;
      ESP_LOGD(TAG, "Allocated %zu * %zu bytes - %zu bytes", this->buffer_count_, this->buffer_size_,
               this->total_length_);
      return true;
    }

    // Allocation failed, clean up and try smaller buffers
    for (size_t i = 0; i < needed_buffers; i++) {
      if (temp_buffers[i] != nullptr) {
        allocator.deallocate(temp_buffers[i], 0);
      }
    }
    ptr_allocator.deallocate(temp_buffers, 0);

    // Halve the buffer size and try again
    current_buffer_size = current_buffer_size / 2;
  }

  ESP_LOGE(TAG, "Failed to allocate %zu bytes", total_length);
  return false;
}

void SplitBuffer::free() {
  if (this->buffers_ != nullptr) {
    RAMAllocator<uint8_t> allocator;
    for (size_t i = 0; i < this->buffer_count_; i++) {
      if (this->buffers_[i] != nullptr) {
        allocator.deallocate(this->buffers_[i], 0);
      }
    }
    RAMAllocator<uint8_t *> ptr_allocator;
    ptr_allocator.deallocate(this->buffers_, 0);
    this->buffers_ = nullptr;
  }
  this->buffer_count_ = 0;
  this->buffer_size_ = 0;
  this->total_length_ = 0;
}

uint8_t &SplitBuffer::operator[](size_t index) {
  if (index >= this->total_length_) {
    ESP_LOGE(TAG, "Out of bounds - %zu >= %zu", index, this->total_length_);
    // Return reference to a static dummy byte to avoid crash
    static uint8_t dummy = 0;
    return dummy;
  }

  size_t buffer_index = index / this->buffer_size_;
  size_t offset_in_buffer = index - this->buffer_size_ * buffer_index;

  return this->buffers_[buffer_index][offset_in_buffer];
}

const uint8_t &SplitBuffer::operator[](size_t index) const {
  if (index >= this->total_length_) {
    ESP_LOGE(TAG, "Out of bounds - %zu >= %zu", index, this->total_length_);
    // Return reference to a static dummy byte to avoid crash
    static const uint8_t DUMMY = 0;
    return DUMMY;
  }

  size_t buffer_index = index / this->buffer_size_;
  size_t offset_in_buffer = index - this->buffer_size_ * buffer_index;

  return this->buffers_[buffer_index][offset_in_buffer];
}

}  // namespace esphome::split_buffer
esphome/components/split_buffer/split_buffer.h (new file, 40 lines)
@@ -0,0 +1,40 @@
#pragma once

#include <cstdint>
#include <cstdlib>

namespace esphome::split_buffer {

class SplitBuffer {
 public:
  SplitBuffer() = default;
  ~SplitBuffer();

  // Initialize the buffer with the desired total length
  bool init(size_t total_length);

  // Free all allocated buffers
  void free();

  // Access operators
  uint8_t &operator[](size_t index);
  const uint8_t &operator[](size_t index) const;

  // Get the total length
  size_t size() const { return this->total_length_; }

  // Get buffer information
  size_t get_buffer_count() const { return this->buffer_count_; }
  size_t get_buffer_size() const { return this->buffer_size_; }

  // Check if successfully initialized
  bool is_valid() const { return this->buffers_ != nullptr && this->buffer_count_ > 0; }

 private:
  uint8_t **buffers_{nullptr};
  size_t buffer_count_{0};
  size_t buffer_size_{0};
  size_t total_length_{0};
};

}  // namespace esphome::split_buffer
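To make the allocation strategy above easier to follow, here is a minimal Python model of SplitBuffer::init() and operator[] (an illustration only; `try_alloc` stands in for the real RAMAllocator):

```python
# Minimal Python model of the SplitBuffer fallback strategy (illustration only).
def plan_split(total_length: int, try_alloc) -> list[int]:
    """Return the chunk sizes init() would settle on, halving on failure."""
    size = total_length
    while size > 0:
        needed = -(-total_length // size)  # ceiling division
        chunks = [size] * needed
        if total_length % size:
            chunks[-1] = total_length % size  # last buffer may be smaller
        if all(try_alloc(c) for c in chunks):
            return chunks
        size //= 2  # halve the buffer size and try again
    raise MemoryError(f"cannot allocate {total_length} bytes")

def locate(index: int, buffer_size: int) -> tuple[int, int]:
    """operator[] maps a flat index to (buffer, offset within that buffer)."""
    return index // buffer_size, index % buffer_size

print(plan_split(10000, lambda n: n <= 4096))  # -> [2500, 2500, 2500, 2500]
print(locate(6000, 2500))                      # -> (2, 1000)
```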
@@ -347,7 +347,7 @@ def final_validate_device_schema(

    def validate_pin(opt, device):
        def validator(value):
            if opt in device:
            if opt in device and not CORE.testing_mode:
                raise cv.Invalid(
                    f"The uart {opt} is used both by {name} and {device[opt]}, "
                    f"but can only be used by one. Please create a new uart bus for {name}."
@@ -647,7 +647,7 @@ class AddDynamicAutoLoadsValidationStep(ConfigValidationStep):
    """

    # Has to happen after normal schema is validated and before final schema validation
    priority = -10.0
    priority = -5.0

    def __init__(self, path: ConfigPath, comp: ComponentManifest) -> None:
        self.path = path
@@ -4,7 +4,7 @@ from enum import Enum

from esphome.enum import StrEnum

__version__ = "2025.10.0-dev"
__version__ = "2025.11.0-dev"

ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
VALID_SUBSTITUTIONS_CHARACTERS = (
@@ -529,6 +529,8 @@ class EsphomeCore:
        self.dashboard = False
        # True if command is run from vscode api
        self.vscode = False
        # True if running in testing mode (disables validation checks for grouped testing)
        self.testing_mode = False
        # The name of the node
        self.name: str | None = None
        # The friendly name of the node
@@ -703,15 +705,6 @@ class EsphomeCore:
    def relative_piolibdeps_path(self, *path: str | Path) -> Path:
        return self.relative_build_path(".piolibdeps", *path)

    @property
    def platformio_cache_dir(self) -> str:
        """Get the PlatformIO cache directory path."""
        # Check if running in Docker/HA addon with custom cache dir
        if (cache_dir := os.environ.get("PLATFORMIO_CACHE_DIR")) and cache_dir.strip():
            return cache_dir
        # Default PlatformIO cache location
        return os.path.expanduser("~/.platformio/.cache")

    @property
    def firmware_bin(self) -> Path:
        if self.is_libretiny:
@@ -340,8 +340,8 @@ void Application::calculate_looping_components_() {
    }
  }

  // Pre-reserve vector to avoid reallocations
  this->looping_components_.reserve(total_looping);
  // Initialize FixedVector with exact size - no reallocation possible
  this->looping_components_.init(total_looping);

  // Add all components with loop override that aren't already LOOP_DONE
  // Some components (like logger) may call disable_loop() during initialization
@@ -472,7 +472,7 @@ class Application {
  // - When a component is enabled, it's swapped with the first inactive component
  //   and active_end_ is incremented
  // - This eliminates branch mispredictions from flag checking in the hot loop
  std::vector<Component *> looping_components_{};
  FixedVector<Component *> looping_components_{};
#ifdef USE_SOCKET_SELECT_SUPPORT
  std::vector<int> socket_fds_;  // Vector of all monitored socket file descriptors
#endif
@@ -84,6 +84,7 @@
#define USE_LVGL_TOUCHSCREEN
#define USE_MDNS
#define MDNS_SERVICE_COUNT 3
#define MDNS_DYNAMIC_TXT_COUNT 3
#define USE_MEDIA_PLAYER
#define USE_NEXTION_TFT_UPLOAD
#define USE_NUMBER
@@ -174,6 +175,8 @@
#define USE_ESP32_BLE_SERVER_DESCRIPTOR_ON_WRITE
#define USE_ESP32_BLE_SERVER_ON_CONNECT
#define USE_ESP32_BLE_SERVER_ON_DISCONNECT
#define ESPHOME_ESP32_BLE_TRACKER_LISTENER_COUNT 1
#define ESPHOME_ESP32_BLE_TRACKER_CLIENT_COUNT 1
#define USE_ESP32_CAMERA_JPEG_ENCODER
#define USE_I2C
#define USE_IMPROV
@@ -246,12 +246,15 @@ def entity_duplicate_validator(platform: str) -> Callable[[ConfigType], ConfigTy
                "\n to distinguish them"
            )

        raise cv.Invalid(
            f"Duplicate {platform} entity with name '{entity_name}' found{device_prefix}. "
            f"{conflict_msg}. "
            "Each entity on a device must have a unique name within its platform."
            f"{sanitized_msg}"
        )
        # Skip duplicate entity name validation when testing_mode is enabled
        # This flag is used for grouped component testing
        if not CORE.testing_mode:
            raise cv.Invalid(
                f"Duplicate {platform} entity with name '{entity_name}' found{device_prefix}. "
                f"{conflict_msg}. "
                "Each entity on a device must have a unique name within its platform."
                f"{sanitized_msg}"
            )

    # Store metadata about this entity
    entity_metadata: EntityMetadata = {
@@ -390,8 +390,10 @@ int8_t step_to_accuracy_decimals(float step) {
  return str.length() - dot_pos - 1;
}

// Store BASE64 characters as array - automatically placed in flash/ROM on embedded platforms
static const char BASE64_CHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
// Use C-style string constant to store in ROM instead of RAM (saves 24 bytes)
static constexpr const char *BASE64_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                                            "abcdefghijklmnopqrstuvwxyz"
                                            "0123456789+/";

// Helper function to find the index of a base64 character in the lookup table.
// Returns the character's position (0-63) if found, or 0 if not found.
@@ -401,8 +403,8 @@ static const char BASE64_CHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqr
// stops processing at the first invalid character due to the is_base64() check in its
// while loop condition, making this edge case harmless in practice.
static inline uint8_t base64_find_char(char c) {
  const void *ptr = memchr(BASE64_CHARS, c, sizeof(BASE64_CHARS));
  return ptr ? (static_cast<const char *>(ptr) - BASE64_CHARS) : 0;
  const char *pos = strchr(BASE64_CHARS, c);
  return pos ? (pos - BASE64_CHARS) : 0;
}

static inline bool is_base64(char c) { return (isalnum(c) || (c == '+') || (c == '/')); }
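A quick Python model of the base64_find_char() semantics above (illustration only, not code from the PR): the function returns a character's index in the table, or 0 when the character is not present, which is harmless because the decode loop stops at invalid characters.

```python
# Python model of base64_find_char() (illustration only).
BASE64_CHARS = (
    "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    "abcdefghijklmnopqrstuvwxyz"
    "0123456789+/"
)

def base64_find_char(c: str) -> int:
    pos = BASE64_CHARS.find(c)
    return pos if pos >= 0 else 0  # invalid chars map to 0; decode stops there anyway

assert base64_find_char("A") == 0
assert base64_find_char("/") == 63
assert base64_find_char("!") == 0
```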
@@ -143,9 +143,6 @@ template<typename T, size_t N> class StaticVector {
  size_t size() const { return count_; }
  bool empty() const { return count_ == 0; }

  // Direct access to size counter for efficient in-place construction
  size_t &count() { return count_; }

  T &operator[](size_t i) { return data_[i]; }
  const T &operator[](size_t i) const { return data_[i]; }

@@ -162,6 +159,54 @@ template<typename T, size_t N> class StaticVector {
  const_reverse_iterator rend() const { return const_reverse_iterator(begin()); }
};

/// Fixed-capacity vector - allocates once at runtime, never reallocates
/// This avoids std::vector template overhead (_M_realloc_insert, _M_default_append)
/// when size is known at initialization but not at compile time
template<typename T> class FixedVector {
 private:
  T *data_{nullptr};
  size_t size_{0};
  size_t capacity_{0};

 public:
  FixedVector() = default;

  ~FixedVector() {
    if (data_ != nullptr) {
      delete[] data_;
    }
  }

  // Disable copy to avoid accidental copies
  FixedVector(const FixedVector &) = delete;
  FixedVector &operator=(const FixedVector &) = delete;

  // Allocate capacity - can only be called once on empty vector
  void init(size_t n) {
    if (data_ == nullptr && n > 0) {
      data_ = new T[n];
      capacity_ = n;
      size_ = 0;
    }
  }

  /// Add element without bounds checking
  /// Caller must ensure sufficient capacity was allocated via init()
  /// Silently ignores pushes beyond capacity (no exception or assertion)
  void push_back(const T &value) {
    if (size_ < capacity_) {
      data_[size_++] = value;
    }
  }

  size_t size() const { return size_; }

  /// Access element without bounds checking (matches std::vector behavior)
  /// Caller must ensure index is valid (i < size())
  T &operator[](size_t i) { return data_[i]; }
  const T &operator[](size_t i) const { return data_[i]; }
};

///@}

/// @name Mathematics
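A small Python model of the FixedVector contract above (illustration, not part of the PR): capacity is fixed by the first init(), and pushes past capacity are silently dropped, which is why Application sizes it with the exact looping-component count.

```python
class FixedVectorModel:
    """Python model of FixedVector semantics (illustration only)."""

    def __init__(self) -> None:
        self._data: list | None = None
        self._size = 0

    def init(self, n: int) -> None:
        if self._data is None and n > 0:  # only the first init() takes effect
            self._data = [None] * n
            self._size = 0

    def push_back(self, value) -> None:
        if self._data is not None and self._size < len(self._data):
            self._data[self._size] = value
            self._size += 1
        # else: silently ignored, mirroring the C++ push_back()

    def __len__(self) -> int:
        return self._size

v = FixedVectorModel()
v.init(2)
for item in ("a", "b", "c"):  # the third push exceeds capacity and is dropped
    v.push_back(item)
assert len(v) == 2
```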
@@ -95,9 +95,10 @@ class Scheduler {
    } name_;
    uint32_t interval;
    // Split time to handle millis() rollover. The scheduler combines the 32-bit millis()
    // with a 16-bit rollover counter to create a 48-bit time space (stored as 64-bit
    // for compatibility). With 49.7 days per 32-bit rollover, the 16-bit counter
    // supports 49.7 days × 65536 = ~8900 years. This ensures correct scheduling
    // with a 16-bit rollover counter to create a 48-bit time space (using 32+16 bits).
    // This is intentionally limited to 48 bits, not stored as a full 64-bit value.
    // With 49.7 days per 32-bit rollover, the 16-bit counter supports
    // 49.7 days × 65536 = ~8900 years. This ensures correct scheduling
    // even when devices run for months. Split into two fields for better memory
    // alignment on 32-bit systems.
    uint32_t next_execution_low_;  // Lower 32 bits of execution time (millis value)
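A worked example of the arithmetic in the comment above (illustration only; the name `next_execution_major_` for the adjacent 16-bit field is an assumption):

```python
# The 48-bit execution time is the 16-bit rollover counter glued above the
# 32-bit millis value.
def next_execution(major: int, low: int) -> int:
    return (major << 32) | low  # 16 + 32 = 48 usable bits

rollover_days = (1 << 32) / (1000 * 60 * 60 * 24)  # ~49.7 days per rollover
total_years = rollover_days * (1 << 16) / 365.25   # ~8900 years of range
print(f"{rollover_days:.1f} days x 65536 = {total_years:.0f} years")
```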
@@ -118,11 +118,11 @@ class PinRegistry(dict):
            parent_config = fconf.get_config_for_path(parent_path)
            final_val_fun(pin_config, parent_config)
            allow_others = pin_config.get(CONF_ALLOW_OTHER_USES, False)
            if count != 1 and not allow_others:
            if count != 1 and not allow_others and not CORE.testing_mode:
                raise cv.Invalid(
                    f"Pin {pin_config[CONF_NUMBER]} is used in multiple places"
                )
            if count == 1 and allow_others:
            if count == 1 and allow_others and not CORE.testing_mode:
                raise cv.Invalid(
                    f"Pin {pin_config[CONF_NUMBER]} incorrectly sets {CONF_ALLOW_OTHER_USES}: true"
                )
@@ -43,6 +43,35 @@ def patch_structhash():
    cli.clean_build_dir = patched_clean_build_dir


def patch_file_downloader():
    """Patch PlatformIO's FileDownloader to retry on PackageException errors."""
    from platformio.package.download import FileDownloader
    from platformio.package.exception import PackageException

    original_init = FileDownloader.__init__

    def patched_init(self, *args: Any, **kwargs: Any) -> None:
        max_retries = 3

        for attempt in range(max_retries):
            try:
                return original_init(self, *args, **kwargs)
            except PackageException as e:
                if attempt < max_retries - 1:
                    _LOGGER.warning(
                        "Package download failed: %s. Retrying... (attempt %d/%d)",
                        str(e),
                        attempt + 1,
                        max_retries,
                    )
                else:
                    # Final attempt - re-raise
                    raise
        return None

    FileDownloader.__init__ = patched_init


IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
FILTER_PLATFORMIO_LINES = [
    r"Verbose mode can be enabled via `-v, --verbose` option.*",
@@ -75,6 +104,9 @@ FILTER_PLATFORMIO_LINES = [
    r"Creating BIN file .*",
    r"Warning! Could not find file \".*.crt\"",
    r"Warning! Arduino framework as an ESP-IDF component doesn't handle the `variant` field! The default `esp32` variant will be used.",
    r"Warning: DEPRECATED: 'esptool.py' is deprecated. Please use 'esptool' instead. The '.py' suffix will be removed in a future major release.",
    r"Warning: esp-idf-size exited with code 2",
    r"esp_idf_size: error: unrecognized arguments: --ng",
]


@@ -97,6 +129,7 @@ def run_platformio_cli(*args, **kwargs) -> str | int:
    import platformio.__main__

    patch_structhash()
    patch_file_downloader()
    return run_external_command(platformio.__main__.main, *cmd, **kwargs)

@@ -112,16 +145,7 @@ def run_compile(config, verbose):
    args = []
    if CONF_COMPILE_PROCESS_LIMIT in config[CONF_ESPHOME]:
        args += [f"-j{config[CONF_ESPHOME][CONF_COMPILE_PROCESS_LIMIT]}"]
    result = run_platformio_cli_run(config, verbose, *args)

    # Run memory analysis if enabled
    if config.get(CONF_ESPHOME, {}).get("analyze_memory", False):
        try:
            analyze_memory_usage(config)
        except Exception as e:
            _LOGGER.warning("Failed to analyze memory usage: %s", e)

    return result
    return run_platformio_cli_run(config, verbose, *args)


def _run_idedata(config):
@@ -350,93 +374,3 @@ class IDEData:
            return f"{self.cc_path[:-7]}addr2line.exe"

        return f"{self.cc_path[:-3]}addr2line"

    @property
    def objdump_path(self) -> str:
        # replace gcc at end with objdump

        # Windows
        if self.cc_path.endswith(".exe"):
            return f"{self.cc_path[:-7]}objdump.exe"

        return f"{self.cc_path[:-3]}objdump"

    @property
    def readelf_path(self) -> str:
        # replace gcc at end with readelf

        # Windows
        if self.cc_path.endswith(".exe"):
            return f"{self.cc_path[:-7]}readelf.exe"

        return f"{self.cc_path[:-3]}readelf"


def analyze_memory_usage(config: dict[str, Any]) -> None:
    """Analyze memory usage by component after compilation."""
    # Lazy import to avoid overhead when not needed
    from esphome.analyze_memory import MemoryAnalyzer

    idedata = get_idedata(config)

    # Get paths to tools
    elf_path = idedata.firmware_elf_path
    objdump_path = idedata.objdump_path
    readelf_path = idedata.readelf_path

    # Debug logging
    _LOGGER.debug("ELF path from idedata: %s", elf_path)

    # Check if file exists
    if not Path(elf_path).exists():
        # Try alternate path
        alt_path = Path(CORE.relative_build_path(".pioenvs", CORE.name, "firmware.elf"))
        if alt_path.exists():
            elf_path = str(alt_path)
            _LOGGER.debug("Using alternate ELF path: %s", elf_path)
        else:
            _LOGGER.warning("ELF file not found at %s or %s", elf_path, alt_path)
            return

    # Extract external components from config
    external_components = set()

    # Get the list of built-in ESPHome components
    from esphome.analyze_memory import get_esphome_components

    builtin_components = get_esphome_components()

    # Special non-component keys that appear in configs
    NON_COMPONENT_KEYS = {
        CONF_ESPHOME,
        "substitutions",
        "packages",
        "globals",
        "<<",
    }

    # Check all top-level keys in config
    for key in config:
        if key not in builtin_components and key not in NON_COMPONENT_KEYS:
            # This is an external component
            external_components.add(key)

    _LOGGER.debug("Detected external components: %s", external_components)

    # Create analyzer and run analysis
    analyzer = MemoryAnalyzer(elf_path, objdump_path, readelf_path, external_components)
    analyzer.analyze()

    # Generate and print report
    report = analyzer.generate_report()
    _LOGGER.info("\n%s", report)

    # Optionally save to file
    if config.get(CONF_ESPHOME, {}).get("memory_report_file"):
        report_file = Path(config[CONF_ESPHOME]["memory_report_file"])
        if report_file.suffix == ".json":
            report_file.write_text(analyzer.to_json())
            _LOGGER.info("Memory report saved to %s", report_file)
        else:
            report_file.write_text(report)
            _LOGGER.info("Memory report saved to %s", report_file)
@@ -147,7 +147,7 @@ lib_deps =
    makuna/NeoPixelBus@2.8.0 ; neopixelbus
    esphome/ESP32-audioI2S@2.3.0 ; i2s_audio
    droscy/esp_wireguard@0.4.2 ; wireguard
    esphome/esp-audio-libs@1.1.4 ; audio
    esphome/esp-audio-libs@2.0.1 ; audio

build_flags =
    ${common:arduino.build_flags}
@@ -170,7 +170,7 @@ lib_deps =
    ${common:idf.lib_deps}
    droscy/esp_wireguard@0.4.2 ; wireguard
    kahrendt/ESPMicroSpeechFeatures@1.1.0 ; micro_wake_word
    esphome/esp-audio-libs@1.1.4 ; audio
    esphome/esp-audio-libs@2.0.1 ; audio
build_flags =
    ${common:idf.build_flags}
    -Wno-nonnull-compare
@@ -11,8 +11,8 @@ pyserial==3.5
platformio==6.1.18  # When updating platformio, also update /docker/Dockerfile
esptool==5.1.0
click==8.1.7
esphome-dashboard==20250904.0
aioesphomeapi==41.11.0
esphome-dashboard==20251009.0
aioesphomeapi==41.13.0
zeroconf==0.148.0
puremagic==1.30
ruamel.yaml==0.18.15  # dashboard_import
@@ -1,7 +1,7 @@
pylint==3.3.9
flake8==7.3.0  # also change in .pre-commit-config.yaml when updating
ruff==0.13.3  # also change in .pre-commit-config.yaml when updating
pyupgrade==3.20.0  # also change in .pre-commit-config.yaml when updating
ruff==0.14.0  # also change in .pre-commit-config.yaml when updating
pyupgrade==3.21.0  # also change in .pre-commit-config.yaml when updating
pre-commit

# Unit tests
script/analyze_component_buses.py (new executable file, 523 lines)
@@ -0,0 +1,523 @@
#!/usr/bin/env python3
"""Analyze component test files to detect which common bus configs they use.

This script scans component test files and extracts which common bus configurations
(i2c, spi, uart, etc.) are included via the packages mechanism. This information
is used to group components that can be tested together.

Components can only be grouped together if they use the EXACT SAME set of common
bus configurations, ensuring that merged configs are compatible.

Example output:
    {
        "component1": {
            "esp32-ard": ["i2c", "uart_19200"],
            "esp32-idf": ["i2c", "uart_19200"]
        },
        "component2": {
            "esp32-ard": ["spi"],
            "esp32-idf": ["spi"]
        }
    }
"""

from __future__ import annotations

import argparse
from functools import lru_cache
import json
from pathlib import Path
import re
import sys
from typing import Any

# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from esphome import yaml_util
from esphome.config_helpers import Extend, Remove

# Path to common bus configs
COMMON_BUS_PATH = Path("tests/test_build_components/common")

# Package dependencies - maps packages to the packages they include
# When a component uses a package on the left, it automatically gets
# the packages on the right as well
PACKAGE_DEPENDENCIES = {
    "modbus": ["uart"],  # modbus packages include uart packages
    # Add more package dependencies here as needed
}

# Bus types that can be defined directly in config files
# Components defining these directly cannot be grouped (they create unique bus IDs)
DIRECT_BUS_TYPES = ("i2c", "spi", "uart", "modbus")

# Signature for components with no bus requirements
# These components can be merged with any other group
NO_BUSES_SIGNATURE = "no_buses"

# Base bus components - these ARE the bus implementations and should not
# be flagged as needing migration since they are the platform/base components
BASE_BUS_COMPONENTS = {
    "i2c",
    "spi",
    "uart",
    "modbus",
    "canbus",
}

# Components that must be tested in isolation (not grouped or batched with others)
# These have known build issues that prevent grouping
# NOTE: This should be kept in sync with both test_build_components and split_components_for_ci.py
ISOLATED_COMPONENTS = {
    "animation": "Has display lambda in common.yaml that requires existing display platform - breaks when merged without display",
    "esphome": "Defines devices/areas in esphome: section that are referenced in other sections - breaks when merged",
    "ethernet": "Defines ethernet: which conflicts with wifi: used by most components",
    "ethernet_info": "Related to ethernet component which conflicts with wifi",
    "lvgl": "Defines multiple SDL displays on host platform that conflict when merged with other display configs",
    "matrix_keypad": "Needs isolation due to keypad",
    "mcp4725": "no YAML config to specify i2c bus id",
    "mcp47a1": "no YAML config to specify i2c bus id",
    "modbus_controller": "Defines multiple modbus buses for testing client/server functionality - conflicts with package modbus bus",
    "neopixelbus": "RMT type conflict with ESP32 Arduino/ESP-IDF headers (enum vs struct rmt_channel_t)",
    "packages": "cannot merge packages",
}


@lru_cache(maxsize=1)
def get_common_bus_packages() -> frozenset[str]:
    """Get the list of common bus package names.

    Reads from tests/test_build_components/common/ directory
    and caches the result. All bus types support component grouping
    for config validation since --testing-mode bypasses runtime conflicts.

    Returns:
        Frozenset of common bus package names (i2c, spi, uart, etc.)
    """
    if not COMMON_BUS_PATH.exists():
        return frozenset()

    # List all directories in common/ - these are the bus package names
    return frozenset(d.name for d in COMMON_BUS_PATH.iterdir() if d.is_dir())


def uses_local_file_references(component_dir: Path) -> bool:
    """Check if a component uses local file references via $component_dir.

    Components that reference local files cannot be grouped because each needs
    a unique component_dir path pointing to their specific directory.

    Args:
        component_dir: Path to the component's test directory

    Returns:
        True if the component uses $component_dir for local file references
    """
    common_yaml = component_dir / "common.yaml"
    if not common_yaml.exists():
        return False

    try:
        content = common_yaml.read_text()
    except Exception:  # pylint: disable=broad-exception-caught
        return False

    # Pattern to match $component_dir or ${component_dir} references
    # These indicate local file usage that prevents grouping
    return bool(re.search(r"\$\{?component_dir\}?", content))


def is_platform_component(component_dir: Path) -> bool:
    """Check if a component is a platform component (abstract base class).

    Platform components have IS_PLATFORM_COMPONENT = True and cannot be
    instantiated without a platform-specific implementation. These components
    define abstract methods and cause linker errors if compiled standalone.

    Examples: canbus, mcp23x08_base, mcp23x17_base

    Args:
        component_dir: Path to the component's test directory

    Returns:
        True if this is a platform component
    """
    # Check in the actual component source, not tests
    # tests/components/X -> tests/components -> tests -> repo root
    repo_root = component_dir.parent.parent.parent
    comp_init = (
        repo_root / "esphome" / "components" / component_dir.name / "__init__.py"
    )

    if not comp_init.exists():
        return False

    try:
        content = comp_init.read_text()
        return "IS_PLATFORM_COMPONENT = True" in content
    except Exception:  # pylint: disable=broad-exception-caught
        return False


def _contains_extend_or_remove(data: Any) -> bool:
    """Recursively check if data contains Extend or Remove objects.

    Args:
        data: Parsed YAML data structure

    Returns:
        True if any Extend or Remove objects are found
    """
    if isinstance(data, (Extend, Remove)):
        return True

    if isinstance(data, dict):
        for value in data.values():
            if _contains_extend_or_remove(value):
                return True

    if isinstance(data, list):
        for item in data:
            if _contains_extend_or_remove(item):
                return True

    return False


def analyze_yaml_file(yaml_file: Path) -> dict[str, Any]:
    """Load a YAML file once and extract all needed information.

    This loads the YAML file a single time and extracts all information needed
    for component analysis, avoiding multiple file reads.

    Args:
        yaml_file: Path to the YAML file to analyze

    Returns:
        Dictionary with keys:
        - buses: set of common bus package names
        - has_extend_remove: bool indicating if Extend/Remove objects are present
        - has_direct_bus_config: bool indicating if buses are defined directly (not via packages)
        - loaded: bool indicating if file was successfully loaded
    """
    result = {
        "buses": set(),
        "has_extend_remove": False,
        "has_direct_bus_config": False,
        "loaded": False,
    }

    if not yaml_file.exists():
        return result

    try:
        data = yaml_util.load_yaml(yaml_file)
        result["loaded"] = True
    except Exception:  # pylint: disable=broad-exception-caught
        return result

    # Check for Extend/Remove objects
    result["has_extend_remove"] = _contains_extend_or_remove(data)

    # Check if buses are defined directly (not via packages)
    # Components that define i2c, spi, uart, or modbus directly in test files
    # cannot be grouped because they create unique bus IDs
    if isinstance(data, dict):
        for bus_type in DIRECT_BUS_TYPES:
            if bus_type in data:
                result["has_direct_bus_config"] = True
                break

    # Extract common bus packages
    if not isinstance(data, dict) or "packages" not in data:
        return result

    packages = data["packages"]
    if not isinstance(packages, dict):
        return result

    valid_buses = get_common_bus_packages()
    for pkg_name in packages:
        if pkg_name not in valid_buses:
            continue
        result["buses"].add(pkg_name)
        # Add any package dependencies (e.g., modbus includes uart)
        if pkg_name not in PACKAGE_DEPENDENCIES:
            continue
        for dep in PACKAGE_DEPENDENCIES[pkg_name]:
            if dep not in valid_buses:
                continue
            result["buses"].add(dep)

    return result


def analyze_component(component_dir: Path) -> tuple[dict[str, list[str]], bool, bool]:
    """Analyze a component directory to find which buses each platform uses.

    Args:
        component_dir: Path to the component's test directory

    Returns:
        Tuple of:
        - Dictionary mapping platform to list of bus configs
          Example: {"esp32-ard": ["i2c", "spi"], "esp32-idf": ["i2c"]}
        - Boolean indicating if component uses !extend or !remove
        - Boolean indicating if component defines buses directly (not via packages)
    """
    if not component_dir.is_dir():
        return {}, False, False

    platform_buses = {}
    has_extend_remove = False
    has_direct_bus_config = False

    # Analyze all YAML files in the component directory
    for yaml_file in component_dir.glob("*.yaml"):
        analysis = analyze_yaml_file(yaml_file)

        # Track if any file uses extend/remove
        if analysis["has_extend_remove"]:
            has_extend_remove = True

        # Track if any file defines buses directly
        if analysis["has_direct_bus_config"]:
            has_direct_bus_config = True

        # For test.*.yaml files, extract platform and buses
        if yaml_file.name.startswith("test.") and yaml_file.suffix == ".yaml":
            # Extract platform name (e.g., test.esp32-ard.yaml -> esp32-ard)
            platform = yaml_file.stem.replace("test.", "")
            # Always add platform, even if it has no buses (empty list)
            # This allows grouping components that don't use any shared buses
            platform_buses[platform] = (
                sorted(analysis["buses"]) if analysis["buses"] else []
            )

    return platform_buses, has_extend_remove, has_direct_bus_config


def analyze_all_components(
    tests_dir: Path = None,
) -> tuple[dict[str, dict[str, list[str]]], set[str], set[str]]:
    """Analyze all component test directories.

    Args:
        tests_dir: Path to tests/components directory (defaults to auto-detect)

    Returns:
        Tuple of:
        - Dictionary mapping component name to platform->buses mapping
        - Set of component names that cannot be grouped
        - Set of component names that define buses directly (need migration warning)
    """
    if tests_dir is None:
        tests_dir = Path("tests/components")

    if not tests_dir.exists():
        print(f"Error: {tests_dir} does not exist", file=sys.stderr)
        return {}, set(), set()

    components = {}
    non_groupable = set()
    direct_bus_components = set()

    for component_dir in sorted(tests_dir.iterdir()):
        if not component_dir.is_dir():
            continue

        component_name = component_dir.name
        platform_buses, has_extend_remove, has_direct_bus_config = analyze_component(
            component_dir
        )

        if platform_buses:
            components[component_name] = platform_buses

        # Note: Components using $component_dir are now groupable because the merge
        # script rewrites these to absolute paths with component-specific substitutions

        # Check if component is explicitly isolated
        # These have known issues that prevent grouping with other components
        if component_name in ISOLATED_COMPONENTS:
            non_groupable.add(component_name)

        # Check if component is a base bus component
        # These ARE the bus platform implementations and define buses directly for testing
        # They cannot be grouped with components that use bus packages (causes ID conflicts)
        if component_name in BASE_BUS_COMPONENTS:
            non_groupable.add(component_name)

        # Check if component uses !extend or !remove directives
        # These rely on specific config structure and cannot be merged with other components
        # The directives work within a component's own package hierarchy but break when
        # merging independent components together
        if has_extend_remove:
            non_groupable.add(component_name)

        # Check if component defines buses directly in test files
        # These create unique bus IDs and cause conflicts when merged
        # Exclude base bus components (i2c, spi, uart, etc.) since they ARE the platform
        if has_direct_bus_config and component_name not in BASE_BUS_COMPONENTS:
            non_groupable.add(component_name)
            direct_bus_components.add(component_name)

    return components, non_groupable, direct_bus_components


def create_grouping_signature(
    platform_buses: dict[str, list[str]], platform: str
) -> str:
    """Create a signature string for grouping components.

    Components with the same signature can be grouped together for testing.
    All valid bus types can be grouped since --testing-mode bypasses runtime
    conflicts during config validation.

    Args:
        platform_buses: Mapping of platform to list of buses
        platform: The specific platform to create signature for

    Returns:
        Signature string (e.g., "i2c" or "uart") or empty if no valid buses
    """
    buses = platform_buses.get(platform, [])
    if not buses:
        return ""

    # Only include valid bus types in signature
    common_buses = get_common_bus_packages()
    valid_buses = [b for b in buses if b in common_buses]
    if not valid_buses:
        return ""

    return "+".join(sorted(valid_buses))


def group_components_by_signature(
    components: dict[str, dict[str, list[str]]], platform: str
) -> dict[str, list[str]]:
    """Group components by their bus signature for a specific platform.

    Args:
        components: Component analysis results from analyze_all_components()
        platform: Platform to group for (e.g., "esp32-ard")

    Returns:
        Dictionary mapping signature to list of component names
        Example: {"i2c+uart_19200": ["comp1", "comp2"], "spi": ["comp3"]}
    """
    signature_groups: dict[str, list[str]] = {}

    for component_name, platform_buses in components.items():
        if platform not in platform_buses:
            continue

        signature = create_grouping_signature(platform_buses, platform)
        if not signature:
            continue

        if signature not in signature_groups:
            signature_groups[signature] = []
        signature_groups[signature].append(component_name)

    return signature_groups


def main() -> None:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Analyze component test files to detect common bus usage"
    )
    parser.add_argument(
        "--components",
        "-c",
        nargs="+",
        help="Specific components to analyze (default: all)",
    )
    parser.add_argument(
        "--platform",
        "-p",
        help="Show grouping for a specific platform",
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output as JSON",
    )
    parser.add_argument(
        "--group",
        action="store_true",
        help="Show component groupings by bus signature",
    )

    args = parser.parse_args()

    # Analyze components
    tests_dir = Path("tests/components")

    if args.components:
        # Analyze only specified components
        components = {}
        non_groupable = set()
        direct_bus_components = set()
        for comp in args.components:
            comp_dir = tests_dir / comp
            platform_buses, has_extend_remove, has_direct_bus_config = (
                analyze_component(comp_dir)
            )
            if platform_buses:
                components[comp] = platform_buses
            # Note: Components using $component_dir are now groupable
            if comp in ISOLATED_COMPONENTS:
                non_groupable.add(comp)
            if comp in BASE_BUS_COMPONENTS:
                non_groupable.add(comp)
            if has_direct_bus_config and comp not in BASE_BUS_COMPONENTS:
                non_groupable.add(comp)
                direct_bus_components.add(comp)
    else:
        # Analyze all components
        components, non_groupable, direct_bus_components = analyze_all_components(
            tests_dir
        )

    # Output results
    if args.group and args.platform:
        # Show groupings for a specific platform
        groups = group_components_by_signature(components, args.platform)

        if args.json:
            print(json.dumps(groups, indent=2))
        else:
            print(f"Component groupings for {args.platform}:")
            print()
            for signature, comp_list in sorted(groups.items()):
                print(f"  {signature}:")
                for comp in sorted(comp_list):
                    print(f"    - {comp}")
                print()
    elif args.json:
        # JSON output
        print(json.dumps(components, indent=2))
    else:
        # Human-readable output
        for component, platform_buses in sorted(components.items()):
            non_groupable_marker = (
                " [NON-GROUPABLE]" if component in non_groupable else ""
            )
            print(f"{component}{non_groupable_marker}:")
            for platform, buses in sorted(platform_buses.items()):
                bus_str = ", ".join(buses)
                print(f"  {platform}: {bus_str}")
            print()
        print(f"Total components analyzed: {len(components)}")
        if non_groupable:
            print(f"Non-groupable components (use local files): {len(non_groupable)}")
            for comp in sorted(non_groupable):
                print(f"  - {comp}")


if __name__ == "__main__":
    main()
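A usage sketch for the grouping helpers above (illustration only; it assumes `i2c` and `uart_19200` package directories exist under tests/test_build_components/common/):

```python
from script.analyze_component_buses import (
    create_grouping_signature,
    group_components_by_signature,
)

components = {
    "component1": {"esp32-ard": ["i2c", "uart_19200"]},
    "component2": {"esp32-ard": ["i2c", "uart_19200"]},
    "component3": {"esp32-ard": ["spi"]},
}
print(create_grouping_signature(components["component1"], "esp32-ard"))
# -> "i2c+uart_19200"
print(group_components_by_signature(components, "esp32-ard"))
# -> {"i2c+uart_19200": ["component1", "component2"], "spi": ["component3"]}
```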
@@ -237,6 +237,16 @@ def main() -> None:
    result = subprocess.run(cmd, capture_output=True, text=True, check=True)
    changed_components = parse_list_components_output(result.stdout)

    # Filter to only components that have test files
    # Components without tests shouldn't generate CI test jobs
    tests_dir = Path(root_path) / "tests" / "components"
    changed_components_with_tests = [
        component
        for component in changed_components
        if (component_test_dir := tests_dir / component).exists()
        and any(component_test_dir.glob("test.*.yaml"))
    ]

    # Build output
    output: dict[str, Any] = {
        "integration_tests": run_integration,
@@ -244,7 +254,8 @@ def main() -> None:
        "clang_format": run_clang_format,
        "python_linters": run_python_linters,
        "changed_components": changed_components,
        "component_test_count": len(changed_components),
        "changed_components_with_tests": changed_components_with_tests,
        "component_test_count": len(changed_components_with_tests),
    }

    # Output as JSON
script/merge_component_configs.py (new executable file, 379 lines)
@@ -0,0 +1,379 @@
#!/usr/bin/env python3
"""Merge multiple component test configurations into a single test file.

This script combines multiple component test files that use the same common bus
configurations into a single merged test file. This allows testing multiple
compatible components together, reducing CI build time.

The merger handles:
- Component-specific substitutions (prefixing to avoid conflicts)
- Multiple instances of component configurations
- Shared common bus packages (included only once)
- Platform-specific configurations
- Uses ESPHome's built-in merge_config for proper YAML merging
"""

from __future__ import annotations

import argparse
from pathlib import Path
import re
import sys
from typing import Any

# Add esphome to path so we can import from it
sys.path.insert(0, str(Path(__file__).parent.parent))

from esphome import yaml_util
from esphome.config_helpers import merge_config
from script.analyze_component_buses import PACKAGE_DEPENDENCIES, get_common_bus_packages


def load_yaml_file(yaml_file: Path) -> dict:
    """Load YAML file using ESPHome's YAML loader.

    Args:
        yaml_file: Path to the YAML file

    Returns:
        Parsed YAML as dictionary
    """
    if not yaml_file.exists():
        raise FileNotFoundError(f"YAML file not found: {yaml_file}")

    return yaml_util.load_yaml(yaml_file)


def extract_packages_from_yaml(data: dict) -> dict[str, str]:
    """Extract COMMON BUS package includes from parsed YAML.

    Only extracts packages that are from test_build_components/common/,
    ignoring component-specific packages.

    Args:
        data: Parsed YAML dictionary

    Returns:
        Dictionary mapping package name to include path (as string representation)
        Only includes common bus packages (i2c, spi, uart, etc.)
    """
    if "packages" not in data:
        return {}

    packages_value = data["packages"]
    if not isinstance(packages_value, dict):
        # List format doesn't include common bus packages (those use dict format)
        return {}

    # Get common bus package names (cached)
    common_bus_packages = get_common_bus_packages()
    packages = {}

    # Dictionary format: packages: {name: value}
    for name, value in packages_value.items():
        # Only include common bus packages, ignore component-specific ones
        if name not in common_bus_packages:
            continue
        packages[name] = str(value)
        # Also track package dependencies (e.g., modbus includes uart)
        if name not in PACKAGE_DEPENDENCIES:
            continue
        for dep in PACKAGE_DEPENDENCIES[name]:
            if dep not in common_bus_packages:
                continue
            # Mark as included via dependency
            packages[f"_dep_{dep}"] = f"(included via {name})"

    return packages


def prefix_substitutions_in_dict(
    data: Any, prefix: str, exclude: set[str] | None = None
) -> Any:
    """Recursively prefix all substitution references in a data structure.

    Args:
        data: YAML data structure (dict, list, or scalar)
        prefix: Prefix to add to substitution names
        exclude: Set of substitution names to exclude from prefixing

    Returns:
        Data structure with prefixed substitution references
    """
    if exclude is None:
        exclude = set()

    def replace_sub(text: str) -> str:
        """Replace substitution references in a string."""

        def replace_match(match):
            sub_name = match.group(1)
            if sub_name in exclude:
                return match.group(0)
            # Always use braced format in output for consistency
            return f"${{{prefix}_{sub_name}}}"

        # Match both ${substitution} and $substitution formats
        return re.sub(r"\$\{?(\w+)\}?", replace_match, text)

    if isinstance(data, dict):
        result = {}
        for key, value in data.items():
            result[key] = prefix_substitutions_in_dict(value, prefix, exclude)
        return result
    if isinstance(data, list):
        return [prefix_substitutions_in_dict(item, prefix, exclude) for item in data]
    if isinstance(data, str):
        return replace_sub(data)
    return data
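A usage sketch for prefix_substitutions_in_dict() (illustration only; the component name `uptime` is arbitrary): substitution references gain a component prefix so that two merged components' identically named substitutions no longer collide.

```python
from script.merge_component_configs import prefix_substitutions_in_dict

data = {"esphome": {"name": "${name}"}, "wifi": {"ssid": "$ssid"}}
print(prefix_substitutions_in_dict(data, "uptime"))
# -> {'esphome': {'name': '${uptime_name}'}, 'wifi': {'ssid': '${uptime_ssid}'}}
```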
|
||||
def deduplicate_by_id(data: dict) -> dict:
|
||||
"""Deduplicate list items with the same ID.
|
||||
|
||||
Keeps only the first occurrence of each ID. If items with the same ID
|
||||
are identical, this silently deduplicates. If they differ, the first
|
||||
one is kept (ESPHome's validation will catch if this causes issues).
|
||||
|
||||
Args:
|
||||
data: Parsed config dictionary
|
||||
|
||||
Returns:
|
||||
Config with deduplicated lists
|
||||
"""
|
||||
if not isinstance(data, dict):
|
||||
return data
|
||||
|
||||
result = {}
|
||||
for key, value in data.items():
|
||||
if isinstance(value, list):
|
||||
# Check for items with 'id' field
|
||||
seen_ids = set()
|
||||
deduped_list = []
|
||||
|
||||
for item in value:
|
||||
if isinstance(item, dict) and "id" in item:
|
||||
item_id = item["id"]
|
||||
if item_id not in seen_ids:
|
||||
seen_ids.add(item_id)
|
||||
deduped_list.append(item)
|
||||
# else: skip duplicate ID (keep first occurrence)
|
||||
else:
|
||||
# No ID, just add it
|
||||
deduped_list.append(item)
|
||||
|
||||
result[key] = deduped_list
|
||||
elif isinstance(value, dict):
|
||||
# Recursively deduplicate nested dicts
|
||||
result[key] = deduplicate_by_id(value)
|
||||
else:
|
||||
result[key] = value
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def merge_component_configs(
    component_names: list[str],
    platform: str,
    tests_dir: Path,
    output_file: Path,
) -> None:
    """Merge multiple component test configs into a single file.

    Args:
        component_names: List of component names to merge
        platform: Platform to merge for (e.g., "esp32-ard")
        tests_dir: Path to tests/components directory
        output_file: Path to output merged config file
    """
    if not component_names:
        raise ValueError("No components specified")

    # Track packages to ensure they're identical
    all_packages = None

    # Start with empty config
    merged_config_data = {}

    # Process each component
    for comp_name in component_names:
        comp_dir = tests_dir / comp_name
        test_file = comp_dir / f"test.{platform}.yaml"

        if not test_file.exists():
            raise FileNotFoundError(f"Test file not found: {test_file}")

        # Load the component's test file
        comp_data = load_yaml_file(test_file)

        # Validate packages are compatible
        # Components with no packages (no_buses) can merge with any group
        comp_packages = extract_packages_from_yaml(comp_data)

        if all_packages is None:
            # First component - set the baseline
            all_packages = comp_packages
        elif not comp_packages:
            # This component has no packages (no_buses) - it can merge with any group
            pass
        elif not all_packages:
            # Previous components had no packages, but this one does - adopt these packages
            all_packages = comp_packages
        elif comp_packages != all_packages:
            # Both have packages but they differ - this is an error
            raise ValueError(
                f"Component {comp_name} has different packages than previous components. "
                f"Expected: {all_packages}, Got: {comp_packages}. "
                f"All components must use the same common bus configs to be merged."
            )

        # Handle $component_dir by replacing with absolute path
        # This allows components that use local file references to be grouped
        comp_abs_dir = str(comp_dir.absolute())

        # Save top-level substitutions BEFORE expanding packages
        # In ESPHome, top-level substitutions override package substitutions
        top_level_subs = (
            comp_data["substitutions"].copy()
            if "substitutions" in comp_data and comp_data["substitutions"] is not None
            else {}
        )

        # Expand packages - but we'll restore substitution priority after
        if "packages" in comp_data:
            packages_value = comp_data["packages"]

            if isinstance(packages_value, dict):
                # Dict format - check each package
                common_bus_packages = get_common_bus_packages()
                for pkg_name, pkg_value in list(packages_value.items()):
                    if pkg_name in common_bus_packages:
                        continue
                    if not isinstance(pkg_value, dict):
                        continue
                    # Component-specific package - expand its content into top level
                    comp_data = merge_config(comp_data, pkg_value)
            elif isinstance(packages_value, list):
                # List format - expand all package includes
                for pkg_value in packages_value:
                    if not isinstance(pkg_value, dict):
                        continue
                    comp_data = merge_config(comp_data, pkg_value)

            # Remove all packages (common will be re-added at the end)
            del comp_data["packages"]

        # Restore top-level substitution priority
        # Top-level substitutions override any from packages
        if "substitutions" not in comp_data or comp_data["substitutions"] is None:
            comp_data["substitutions"] = {}

        # Merge: package subs as base, top-level subs override
        comp_data["substitutions"].update(top_level_subs)

        # Now prefix the final merged substitutions
        comp_data["substitutions"] = {
            f"{comp_name}_{sub_name}": sub_value
            for sub_name, sub_value in comp_data["substitutions"].items()
        }

        # Add component_dir substitution with absolute path for this component
        comp_data["substitutions"][f"{comp_name}_component_dir"] = comp_abs_dir

        # Prefix substitution references throughout the config
        comp_data = prefix_substitutions_in_dict(comp_data, comp_name)

        # Use ESPHome's merge_config to merge this component into the result
        # merge_config handles list merging with ID-based deduplication automatically
        merged_config_data = merge_config(merged_config_data, comp_data)

    # Add packages back (only once, since they're identical)
    # IMPORTANT: Only re-add common bus packages (spi, i2c, uart, etc.)
    # Do NOT re-add component-specific packages as they contain unprefixed $component_dir refs
    if all_packages:
        first_comp_data = load_yaml_file(
            tests_dir / component_names[0] / f"test.{platform}.yaml"
        )
        if "packages" in first_comp_data and isinstance(
            first_comp_data["packages"], dict
        ):
            # Filter to only include common bus packages
            # Only dict format can contain common bus packages
            common_bus_packages = get_common_bus_packages()
            filtered_packages = {
                name: value
                for name, value in first_comp_data["packages"].items()
                if name in common_bus_packages
            }
            if filtered_packages:
                merged_config_data["packages"] = filtered_packages

    # Deduplicate items with same ID (keeps first occurrence)
    merged_config_data = deduplicate_by_id(merged_config_data)

    # Remove esphome section since it will be provided by the wrapper file
    # The wrapper file includes this merged config via packages and provides
    # the proper esphome: section with name, platform, etc.
    if "esphome" in merged_config_data:
        del merged_config_data["esphome"]

    # Write merged config
    output_file.parent.mkdir(parents=True, exist_ok=True)
    yaml_content = yaml_util.dump(merged_config_data)
    output_file.write_text(yaml_content)

    print(f"Successfully merged {len(component_names)} components into {output_file}")

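# --- Illustrative sketch (not part of the diff): substitution priority ---
# Package-provided substitutions form the base and top-level substitutions
# win (mirroring ESPHome's own precedence), before everything is prefixed.
# The component name "demo" is hypothetical:
#
#     package_subs = {"baud_rate": "9600", "rx_pin": "GPIO3"}
#     top_level_subs = {"baud_rate": "115200"}
#     merged = {**package_subs, **top_level_subs}  # same effect as .update()
#     prefixed = {f"demo_{k}": v for k, v in merged.items()}
#     assert prefixed == {"demo_baud_rate": "115200", "demo_rx_pin": "GPIO3"}
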
def main() -> None:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Merge multiple component test configs into a single file"
    )
    parser.add_argument(
        "--components",
        "-c",
        required=True,
        help="Comma-separated list of component names to merge",
    )
    parser.add_argument(
        "--platform",
        "-p",
        required=True,
        help="Platform to merge for (e.g., esp32-ard)",
    )
    parser.add_argument(
        "--output",
        "-o",
        required=True,
        type=Path,
        help="Output file path for merged config",
    )
    parser.add_argument(
        "--tests-dir",
        type=Path,
        default=Path("tests/components"),
        help="Path to tests/components directory",
    )

    args = parser.parse_args()

    component_names = [c.strip() for c in args.components.split(",")]

    try:
        merge_component_configs(
            component_names=component_names,
            platform=args.platform,
            tests_dir=args.tests_dir,
            output_file=args.output,
        )
    except Exception as e:
        print(f"Error merging configs: {e}", file=sys.stderr)
        import traceback

        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    main()
268
script/split_components_for_ci.py
Executable file
@@ -0,0 +1,268 @@
#!/usr/bin/env python3
"""Split components into batches with intelligent grouping.

This script analyzes components to identify which ones share common bus configurations
and intelligently groups them into batches to maximize the efficiency of the
component grouping system in CI.

Components with the same bus signature are placed in the same batch whenever possible,
allowing the test_build_components.py script to merge them into single builds.
"""

from __future__ import annotations

import argparse
from collections import defaultdict
import json
from pathlib import Path
import sys

# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from script.analyze_component_buses import (
    ISOLATED_COMPONENTS,
    NO_BUSES_SIGNATURE,
    analyze_all_components,
    create_grouping_signature,
)

# Weighting for batch creation
# Isolated components can't be grouped/merged, so they count as 10x
# Groupable components can be merged into single builds, so they count as 1x
ISOLATED_WEIGHT = 10
GROUPABLE_WEIGHT = 1

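# --- Illustrative arithmetic (not part of the diff): weighted batch sizing ---
# With the default batch_size of 40, one batch can hold, for example,
# 40 groupable components (40 * 1), or 4 isolated components (4 * 10),
# or a mix such as 2 isolated + 20 groupable (2 * 10 + 20 * 1 = 40).
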
def has_test_files(component_name: str, tests_dir: Path) -> bool:
    """Check if a component has test files.

    Args:
        component_name: Name of the component
        tests_dir: Path to tests/components directory

    Returns:
        True if the component has test.*.yaml files
    """
    component_dir = tests_dir / component_name
    if not component_dir.exists() or not component_dir.is_dir():
        return False

    # Check for test.*.yaml files
    return any(component_dir.glob("test.*.yaml"))

def create_intelligent_batches(
    components: list[str],
    tests_dir: Path,
    batch_size: int = 40,
) -> list[list[str]]:
    """Create batches optimized for component grouping.

    Args:
        components: List of component names to batch
        tests_dir: Path to tests/components directory
        batch_size: Target size for each batch

    Returns:
        List of component batches (lists of component names)
    """
    # Filter out components without test files
    # Platform components like 'climate' and 'climate_ir' don't have test files
    components_with_tests = [
        comp for comp in components if has_test_files(comp, tests_dir)
    ]

    # Log filtered components to stderr for debugging
    if len(components_with_tests) < len(components):
        filtered_out = set(components) - set(components_with_tests)
        print(
            f"Note: Filtered {len(filtered_out)} components without test files: "
            f"{', '.join(sorted(filtered_out))}",
            file=sys.stderr,
        )

    # Analyze all components to get their bus signatures
    component_buses, non_groupable, _direct_bus_components = analyze_all_components(
        tests_dir
    )

    # Group components by their bus signature ONLY (ignore platform)
    # All platforms will be tested by test_build_components.py for each batch
    # Key: signature, Value: list of components
    signature_groups: dict[str, list[str]] = defaultdict(list)

    for component in components_with_tests:
        # Components that can't be grouped get unique signatures
        # This includes both manually curated ISOLATED_COMPONENTS and
        # automatically detected non_groupable components
        # These can share a batch/runner but won't be grouped/merged
        if component in ISOLATED_COMPONENTS or component in non_groupable:
            signature_groups[f"isolated_{component}"].append(component)
            continue

        # Get signature from any platform (they should all have the same buses)
        # Components not in component_buses were filtered out by has_test_files check
        comp_platforms = component_buses[component]
        for platform, buses in comp_platforms.items():
            if buses:
                signature = create_grouping_signature({platform: buses}, platform)
                # Group by signature only - platform doesn't matter for batching
                signature_groups[signature].append(component)
                break  # Only use first platform for grouping
        else:
            # No buses found for any platform - can be grouped together
            signature_groups[NO_BUSES_SIGNATURE].append(component)

    # Create batches by keeping signature groups together
    # Components with the same signature stay in the same batches
    batches = []

    # Sort signature groups to prioritize groupable components
    # 1. Put "isolated_*" signatures last (can't be grouped with others)
    # 2. Sort groupable signatures by size (largest first)
    # 3. "no_buses" components CAN be grouped together
    def sort_key(item):
        signature, components = item
        is_isolated = signature.startswith("isolated_")
        # Put "isolated_*" last (1), groupable first (0)
        # Within each category, sort by size (largest first)
        return (is_isolated, -len(components))

    sorted_groups = sorted(signature_groups.items(), key=sort_key)

    # Strategy: Create batches using weighted sizes
    # - Isolated components count as 10x (since they can't be grouped/merged)
    # - Groupable components count as 1x (can be merged into single builds)
    # - This distributes isolated components across more runners
    # - Ensures each runner has a good mix of groupable vs isolated components

    current_batch = []
    current_weight = 0

    for signature, group_components in sorted_groups:
        is_isolated = signature.startswith("isolated_")
        weight_per_component = ISOLATED_WEIGHT if is_isolated else GROUPABLE_WEIGHT

        for component in group_components:
            # Check if adding this component would exceed the batch size
            if current_weight + weight_per_component > batch_size and current_batch:
                # Start a new batch
                batches.append(current_batch)
                current_batch = []
                current_weight = 0

            # Add component to current batch
            current_batch.append(component)
            current_weight += weight_per_component

    # Don't forget the last batch
    if current_batch:
        batches.append(current_batch)

    return batches

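# --- Illustrative sketch (not part of the diff): group ordering ---
# sort_key yields (is_isolated, -size), so large groupable groups come first
# and isolated_* singletons come last. Group names here are hypothetical:
#
#     groups = {"sig_i2c": ["a", "b", "c"], "isolated_x": ["x"], "sig_spi": ["d", "e"]}
#     ordered = sorted(
#         groups.items(), key=lambda it: (it[0].startswith("isolated_"), -len(it[1]))
#     )
#     assert [name for name, _ in ordered] == ["sig_i2c", "sig_spi", "isolated_x"]
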
def main() -> int:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Split components into intelligent batches for CI testing"
    )
    parser.add_argument(
        "--components",
        "-c",
        required=True,
        help="JSON array of component names",
    )
    parser.add_argument(
        "--batch-size",
        "-b",
        type=int,
        default=40,
        help="Target batch size (default: 40, weighted)",
    )
    parser.add_argument(
        "--tests-dir",
        type=Path,
        default=Path("tests/components"),
        help="Path to tests/components directory",
    )
    parser.add_argument(
        "--output",
        "-o",
        choices=["json", "github"],
        default="github",
        help="Output format (json or github for GitHub Actions)",
    )

    args = parser.parse_args()

    # Parse component list from JSON
    try:
        components = json.loads(args.components)
    except json.JSONDecodeError as e:
        print(f"Error parsing components JSON: {e}", file=sys.stderr)
        return 1

    if not isinstance(components, list):
        print("Components must be a JSON array", file=sys.stderr)
        return 1

    # Create intelligent batches
    batches = create_intelligent_batches(
        components=components,
        tests_dir=args.tests_dir,
        batch_size=args.batch_size,
    )

    # Convert batches to space-separated strings for CI
    batch_strings = [" ".join(batch) for batch in batches]

    if args.output == "json":
        # Output as JSON array
        print(json.dumps(batch_strings))
    else:
        # Output for GitHub Actions (set output)
        output_json = json.dumps(batch_strings)
        print(f"components={output_json}")

    # Print summary to stderr so it shows in CI logs
    # Count actual components being batched
    actual_components = sum(len(batch.split()) for batch in batch_strings)

    # Re-analyze to get isolated component counts for summary
    _, non_groupable, _ = analyze_all_components(args.tests_dir)

    # Count isolated vs groupable components
    all_batched_components = [comp for batch in batches for comp in batch]
    isolated_count = sum(
        1
        for comp in all_batched_components
        if comp in ISOLATED_COMPONENTS or comp in non_groupable
    )
    groupable_count = actual_components - isolated_count

    print("\n=== Intelligent Batch Summary ===", file=sys.stderr)
    print(f"Total components requested: {len(components)}", file=sys.stderr)
    print(f"Components with test files: {actual_components}", file=sys.stderr)
    print(f"  - Groupable (weight=1): {groupable_count}", file=sys.stderr)
    print(f"  - Isolated (weight=10): {isolated_count}", file=sys.stderr)
    if actual_components < len(components):
        print(
            f"Components skipped (no test files): {len(components) - actual_components}",
            file=sys.stderr,
        )
    print(f"Number of batches: {len(batches)}", file=sys.stderr)
    print(f"Batch size target (weighted): {args.batch_size}", file=sys.stderr)
    if len(batches) > 0:
        print(
            f"Average components per batch: {actual_components / len(batches):.1f}",
            file=sys.stderr,
        )
    print(file=sys.stderr)

    return 0


if __name__ == "__main__":
    sys.exit(main())
@@ -1,106 +0,0 @@
#!/usr/bin/env bash

set -e

help() {
  echo "Usage: $0 [-e <config|compile|clean>] [-c <string>] [-t <string>]" 1>&2
  echo 1>&2
  echo "  - e - Parameter for esphome command. Default compile. Common alternative is config." 1>&2
  echo "  - c - Component folder name to test. Default *. E.g. '-c logger'." 1>&2
  echo "  - t - Target name to test. Put '-t list' to display all possibilities. E.g. '-t esp32-s2-idf-51'." 1>&2
  exit 1
}

# Parse parameter:
# - `e` - Parameter for `esphome` command. Default `compile`. Common alternative is `config`.
# - `c` - Component folder name to test. Default `*`.
esphome_command="compile"
target_component="*"
while getopts e:c:t: flag
do
  case $flag in
    e) esphome_command=${OPTARG};;
    c) target_component=${OPTARG};;
    t) requested_target_platform=${OPTARG};;
    \?) help;;
  esac
done

cd "$(dirname "$0")/.."

if ! [ -d "./tests/test_build_components/build" ]; then
  mkdir ./tests/test_build_components/build
fi

start_esphome() {
  if [ -n "$requested_target_platform" ] && [ "$requested_target_platform" != "$target_platform_with_version" ]; then
    echo "Skipping $target_platform_with_version"
    return
  fi
  # create dynamic yaml file in `build` folder.
  # `./tests/test_build_components/build/[target_component].[test_name].[target_platform_with_version].yaml`
  component_test_file="./tests/test_build_components/build/$target_component.$test_name.$target_platform_with_version.yaml"

  cp $target_platform_file $component_test_file
  if [[ "$OSTYPE" == "darwin"* ]]; then
    # macOS sed is...different
    sed -i '' "s!\$component_test_file!../../.$f!g" $component_test_file
  else
    sed -i "s!\$component_test_file!../../.$f!g" $component_test_file
  fi

  # Start esphome process
  echo "> [$target_component] [$test_name] [$target_platform_with_version]"
  set -x
  # TODO: Validate escape of Command line substitution value
  python3 -m esphome -s component_name $target_component -s component_dir ../../components/$target_component -s test_name $test_name -s target_platform $target_platform $esphome_command $component_test_file
  { set +x; } 2>/dev/null
}

# Find all test yaml files.
# - `./tests/components/[target_component]/[test_name].[target_platform].yaml`
# - `./tests/components/[target_component]/[test_name].all.yaml`
for f in ./tests/components/$target_component/*.*.yaml; do
  [ -f "$f" ] || continue
  IFS='/' read -r -a folder_name <<< "$f"
  target_component="${folder_name[3]}"

  IFS='.' read -r -a file_name <<< "${folder_name[4]}"
  test_name="${file_name[0]}"
  target_platform="${file_name[1]}"
  file_name_parts=${#file_name[@]}

  if [ "$target_platform" = "all" ] || [ $file_name_parts = 2 ]; then
    # Test has *not* defined a specific target platform. Need to run tests for all possible target platforms.

    for target_platform_file in ./tests/test_build_components/build_components_base.*.yaml; do
      IFS='/' read -r -a folder_name <<< "$target_platform_file"
      IFS='.' read -r -a file_name <<< "${folder_name[3]}"
      target_platform="${file_name[1]}"

      start_esphome
    done

  else
    # Test has defined a specific target platform.

    # Validate we have a base test yaml for selected platform.
    # The target_platform is sourced from the following location.
    # 1. `./tests/test_build_components/build_components_base.[target_platform].yaml`
    # 2. `./tests/test_build_components/build_components_base.[target_platform]-ard.yaml`
    target_platform_file="./tests/test_build_components/build_components_base.$target_platform.yaml"
    if ! [ -f "$target_platform_file" ]; then
      echo "No base test file [./tests/test_build_components/build_components_base.$target_platform.yaml] for component test [$f] found."
      exit 1
    fi

    for target_platform_file in ./tests/test_build_components/build_components_base.$target_platform*.yaml; do
      # trim off "./tests/test_build_components/build_components_base." prefix
      target_platform_with_version=${target_platform_file:52}
      # ...now remove suffix starting with "." leaving just the test target hardware and software platform (possibly with version)
      # For example: "esp32-s3-idf-50"
      target_platform_with_version=${target_platform_with_version%.*}
      start_esphome
    done
  fi
done
1
script/test_build_components
Symbolic link
@@ -0,0 +1 @@
test_build_components.py
931
script/test_build_components.py
Executable file
@@ -0,0 +1,931 @@
#!/usr/bin/env python3
"""Test ESPHome component builds with intelligent grouping.

This script replaces the bash test_build_components script with Python,
adding support for intelligent component grouping based on shared bus
configurations to reduce CI build time.

Features:
- Analyzes components for shared common bus configs
- Groups compatible components together
- Merges configs for grouped components
- Uses --testing-mode for grouped tests
- Maintains backward compatibility with single component testing
"""

from __future__ import annotations

import argparse
from collections import defaultdict
import hashlib
import os
from pathlib import Path
import subprocess
import sys

# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))

# pylint: disable=wrong-import-position
from script.analyze_component_buses import (
    BASE_BUS_COMPONENTS,
    ISOLATED_COMPONENTS,
    NO_BUSES_SIGNATURE,
    analyze_all_components,
    create_grouping_signature,
    is_platform_component,
    uses_local_file_references,
)
from script.merge_component_configs import merge_component_configs

# Platform-specific maximum group sizes
# ESP8266 has limited IRAM and can't handle large component groups
PLATFORM_MAX_GROUP_SIZE = {
    "esp8266-ard": 10,  # ESP8266 Arduino has limited IRAM
    "esp8266-idf": 10,  # ESP8266 IDF also has limited IRAM
    # BK72xx now uses BK7252 board (1.62MB flash vs 1.03MB) - no limit needed
    # Other platforms can handle larger groups
}

def show_disk_space_if_ci(esphome_command: str) -> None:
    """Show disk space usage if running in CI during compile.

    Args:
        esphome_command: The esphome command being run (config/compile/clean)
    """
    if os.environ.get("GITHUB_ACTIONS") and esphome_command == "compile":
        print("\n" + "=" * 80)
        print("Disk Space After Build:")
        print("=" * 80)
        subprocess.run(["df", "-h"], check=False)
        print("=" * 80 + "\n")


def find_component_tests(
    components_dir: Path, component_pattern: str = "*"
) -> dict[str, list[Path]]:
    """Find all component test files.

    Args:
        components_dir: Path to tests/components directory
        component_pattern: Glob pattern for component names

    Returns:
        Dictionary mapping component name to list of test files
    """
    component_tests = defaultdict(list)

    for comp_dir in components_dir.glob(component_pattern):
        if not comp_dir.is_dir():
            continue

        for test_file in comp_dir.glob("test.*.yaml"):
            component_tests[comp_dir.name].append(test_file)

    return dict(component_tests)


def parse_test_filename(test_file: Path) -> tuple[str, str]:
    """Parse test filename to extract test name and platform.

    Args:
        test_file: Path to test file

    Returns:
        Tuple of (test_name, platform)
    """
    parts = test_file.stem.split(".")
    if len(parts) == 2:
        return parts[0], parts[1]  # test, platform
    return parts[0], "all"


def get_platform_base_files(base_dir: Path) -> dict[str, list[Path]]:
    """Get all platform base files.

    Args:
        base_dir: Path to test_build_components directory

    Returns:
        Dictionary mapping platform to list of base files (for version variants)
    """
    platform_files = defaultdict(list)

    for base_file in base_dir.glob("build_components_base.*.yaml"):
        # Extract platform from filename
        # e.g., build_components_base.esp32-idf.yaml -> esp32-idf
        # or build_components_base.esp32-idf-50.yaml -> esp32-idf
        filename = base_file.stem
        parts = filename.replace("build_components_base.", "").split("-")

        # Platform is everything before version number (if present)
        # Check if last part is a number (version)
        platform = "-".join(parts[:-1]) if parts[-1].isdigit() else "-".join(parts)

        platform_files[platform].append(base_file)

    return dict(platform_files)


def extract_platform_with_version(base_file: Path) -> str:
    """Extract platform with version from base filename.

    Args:
        base_file: Path to base file

    Returns:
        Platform with version (e.g., "esp32-idf-50" or "esp32-idf")
    """
    # Remove "build_components_base." prefix and ".yaml" suffix
    return base_file.stem.replace("build_components_base.", "")

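# --- Illustrative sketch (not part of the diff): base-file name parsing ---
# A trailing numeric segment is treated as a version, so versioned and
# unversioned base files map to the same platform key:
#
#     from pathlib import Path
#     f = Path("build_components_base.esp32-idf-50.yaml")
#     assert extract_platform_with_version(f) == "esp32-idf-50"
#     parts = f.stem.replace("build_components_base.", "").split("-")
#     platform = "-".join(parts[:-1]) if parts[-1].isdigit() else "-".join(parts)
#     assert platform == "esp32-idf"
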
def run_esphome_test(
    component: str,
    test_file: Path,
    platform: str,
    platform_with_version: str,
    base_file: Path,
    build_dir: Path,
    esphome_command: str,
    continue_on_fail: bool,
    use_testing_mode: bool = False,
) -> tuple[bool, str]:
    """Run esphome test for a single component.

    Args:
        component: Component name
        test_file: Path to component test file
        platform: Platform name (e.g., "esp32-idf")
        platform_with_version: Platform with version (e.g., "esp32-idf-50")
        base_file: Path to platform base file
        build_dir: Path to build directory
        esphome_command: ESPHome command (config/compile)
        continue_on_fail: Whether to continue on failure
        use_testing_mode: Whether to use --testing-mode flag

    Returns:
        Tuple of (success status, command string)
    """
    test_name = test_file.stem.split(".")[0]

    # Create dynamic test file in build directory
    output_file = build_dir / f"{component}.{test_name}.{platform_with_version}.yaml"

    # Copy base file and substitute component test file reference
    base_content = base_file.read_text()
    # Get relative path from build dir to test file
    repo_root = Path(__file__).parent.parent
    component_test_ref = f"../../{test_file.relative_to(repo_root / 'tests')}"
    output_content = base_content.replace("$component_test_file", component_test_ref)
    output_file.write_text(output_content)

    # Build esphome command
    cmd = [
        sys.executable,
        "-m",
        "esphome",
    ]

    # Add --testing-mode if needed (must be before subcommand)
    if use_testing_mode:
        cmd.append("--testing-mode")

    # Add substitutions
    cmd.extend(
        [
            "-s",
            "component_name",
            component,
            "-s",
            "component_dir",
            f"../../components/{component}",
            "-s",
            "test_name",
            test_name,
            "-s",
            "target_platform",
            platform,
        ]
    )

    # Add command and config file
    cmd.extend([esphome_command, str(output_file)])

    # Build command string for display/logging
    cmd_str = " ".join(cmd)

    # Run command
    print(f"> [{component}] [{test_name}] [{platform_with_version}]")
    if use_testing_mode:
        print("  (using --testing-mode)")

    try:
        result = subprocess.run(cmd, check=False)
        success = result.returncode == 0

        # Show disk space after build in CI during compile
        show_disk_space_if_ci(esphome_command)

        if not success and not continue_on_fail:
            # Print command immediately for failed tests
            print(f"\n{'=' * 80}")
            print("FAILED - Command to reproduce:")
            print(f"{'=' * 80}")
            print(cmd_str)
            print()
            raise subprocess.CalledProcessError(result.returncode, cmd)
        return success, cmd_str
    except subprocess.CalledProcessError:
        # Re-raise if we're not continuing on fail
        if not continue_on_fail:
            raise
        return False, cmd_str

def run_grouped_test(
    components: list[str],
    platform: str,
    platform_with_version: str,
    base_file: Path,
    build_dir: Path,
    tests_dir: Path,
    esphome_command: str,
    continue_on_fail: bool,
) -> tuple[bool, str]:
    """Run esphome test for a group of components with shared bus configs.

    Args:
        components: List of component names to test together
        platform: Platform name (e.g., "esp32-idf")
        platform_with_version: Platform with version (e.g., "esp32-idf-50")
        base_file: Path to platform base file
        build_dir: Path to build directory
        tests_dir: Path to tests/components directory
        esphome_command: ESPHome command (config/compile)
        continue_on_fail: Whether to continue on failure

    Returns:
        Tuple of (success status, command string)
    """
    # Create merged config
    group_name = "_".join(components[:3])  # Use first 3 components for name
    if len(components) > 3:
        group_name += f"_plus_{len(components) - 3}"

    # Create unique device name by hashing sorted component list + platform
    # This prevents conflicts when different component groups are tested
    sorted_components = sorted(components)
    hash_input = "_".join(sorted_components) + "_" + platform
    group_hash = hashlib.md5(hash_input.encode()).hexdigest()[:8]
    device_name = f"comptest{platform.replace('-', '')}{group_hash}"

    merged_config_file = build_dir / f"merged_{group_name}.{platform_with_version}.yaml"

    try:
        merge_component_configs(
            component_names=components,
            platform=platform_with_version,
            tests_dir=tests_dir,
            output_file=merged_config_file,
        )
    except Exception as e:  # pylint: disable=broad-exception-caught
        print(f"Error merging configs for {components}: {e}")
        if not continue_on_fail:
            raise
        # Return empty command string since we failed before building the command
        return False, f"# Failed during config merge: {e}"

    # Create test file that includes merged config
    output_file = build_dir / f"test_{group_name}.{platform_with_version}.yaml"
    base_content = base_file.read_text()
    merged_ref = merged_config_file.name
    output_content = base_content.replace("$component_test_file", merged_ref)
    output_file.write_text(output_content)

    # Build esphome command with --testing-mode
    cmd = [
        sys.executable,
        "-m",
        "esphome",
        "--testing-mode",  # Required for grouped tests
        "-s",
        "component_name",
        device_name,  # Use unique hash-based device name
        "-s",
        "component_dir",
        "../../components",
        "-s",
        "test_name",
        "merged",
        "-s",
        "target_platform",
        platform,
        esphome_command,
        str(output_file),
    ]

    # Build command string for display/logging
    cmd_str = " ".join(cmd)

    # Run command
    components_str = ", ".join(components)
    print(f"> [GROUPED: {components_str}] [{platform_with_version}]")
    print("  (using --testing-mode)")

    try:
        result = subprocess.run(cmd, check=False)
        success = result.returncode == 0

        # Show disk space after build in CI during compile
        show_disk_space_if_ci(esphome_command)

        if not success and not continue_on_fail:
            # Print command immediately for failed tests
            print(f"\n{'=' * 80}")
            print("FAILED - Command to reproduce:")
            print(f"{'=' * 80}")
            print(cmd_str)
            print()
            raise subprocess.CalledProcessError(result.returncode, cmd)
        return success, cmd_str
    except subprocess.CalledProcessError:
        # Re-raise if we're not continuing on fail
        if not continue_on_fail:
            raise
        return False, cmd_str

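# --- Illustrative sketch (not part of the diff): hash-based device names ---
# The device name is derived from the sorted component list plus platform, so
# the same group always gets the same name and different groups cannot
# collide. Component names below are hypothetical:
#
#     import hashlib
#     comps = sorted(["sht3xd", "bmp280"])
#     h = hashlib.md5(("_".join(comps) + "_" + "esp32-idf").encode()).hexdigest()[:8]
#     device_name = f"comptest{'esp32-idf'.replace('-', '')}{h}"
#     # -> "comptestesp32idf" followed by 8 hex characters
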
def run_grouped_component_tests(
    all_tests: dict[str, list[Path]],
    platform_filter: str | None,
    platform_bases: dict[str, list[Path]],
    tests_dir: Path,
    build_dir: Path,
    esphome_command: str,
    continue_on_fail: bool,
) -> tuple[set[tuple[str, str]], list[str], list[str], dict[str, str]]:
    """Run grouped component tests.

    Args:
        all_tests: Dictionary mapping component names to test files
        platform_filter: Optional platform to filter by
        platform_bases: Platform base files mapping
        tests_dir: Path to tests/components directory
        build_dir: Path to build directory
        esphome_command: ESPHome command (config/compile)
        continue_on_fail: Whether to continue on failure

    Returns:
        Tuple of (tested_components, passed_tests, failed_tests, failed_commands)
    """
    tested_components = set()
    passed_tests = []
    failed_tests = []
    failed_commands = {}  # Map test_id to command string

    # Group components by platform and bus signature
    grouped_components: dict[tuple[str, str], list[str]] = defaultdict(list)
    print("\n" + "=" * 80)
    print("Analyzing components for intelligent grouping...")
    print("=" * 80)
    component_buses, non_groupable, direct_bus_components = analyze_all_components(
        tests_dir
    )

    # Track why components can't be grouped (for detailed output)
    non_groupable_reasons = {}

    # Group by (platform, bus_signature)
    for component, platforms in component_buses.items():
        if component not in all_tests:
            continue

        # Skip components that must be tested in isolation
        # These are shown separately and should not be in non_groupable_reasons
        if component in ISOLATED_COMPONENTS:
            continue

        # Skip base bus components (these test the bus platforms themselves)
        if component in BASE_BUS_COMPONENTS:
            continue

        # Skip components that use local file references or direct bus configs
        if component in non_groupable:
            # Track the reason (using pre-calculated results to avoid expensive re-analysis)
            if component not in non_groupable_reasons:
                if component in direct_bus_components:
                    non_groupable_reasons[component] = (
                        "Defines buses directly (not via packages) - NEEDS MIGRATION"
                    )
                elif uses_local_file_references(tests_dir / component):
                    non_groupable_reasons[component] = (
                        "Uses local file references ($component_dir)"
                    )
                elif is_platform_component(tests_dir / component):
                    non_groupable_reasons[component] = (
                        "Platform component (abstract base class)"
                    )
                else:
                    non_groupable_reasons[component] = (
                        "Uses !extend or !remove directives"
                    )
            continue

        for platform, buses in platforms.items():
            # Skip if platform doesn't match filter
            if platform_filter and not platform.startswith(platform_filter):
                continue

            # Create signature for this component's bus configuration
            # Components with no buses get NO_BUSES_SIGNATURE so they can be grouped together
            if buses:
                signature = create_grouping_signature({platform: buses}, platform)
            else:
                signature = NO_BUSES_SIGNATURE

            # Add to grouped components (including those with no buses)
            if signature:
                grouped_components[(platform, signature)].append(component)

    # Print detailed grouping plan
    print("\nGrouping Plan:")
    print("-" * 80)

    # Show isolated components (must test individually due to known issues)
    isolated_in_tests = [c for c in ISOLATED_COMPONENTS if c in all_tests]
    if isolated_in_tests:
        print(
            f"\n⚠ {len(isolated_in_tests)} components must be tested in isolation (known build issues):"
        )
        for comp in sorted(isolated_in_tests):
            reason = ISOLATED_COMPONENTS[comp]
            print(f"  - {comp}: {reason}")

    # Show base bus components (test the bus platform implementations)
    base_bus_in_tests = [c for c in BASE_BUS_COMPONENTS if c in all_tests]
    if base_bus_in_tests:
        print(
            f"\n○ {len(base_bus_in_tests)} base bus platform components (tested individually):"
        )
        for comp in sorted(base_bus_in_tests):
            print(f"  - {comp}")

    # Show excluded components with detailed reasons
    if non_groupable_reasons:
        excluded_in_tests = [c for c in non_groupable_reasons if c in all_tests]
        if excluded_in_tests:
            print(
                f"\n⚠ {len(excluded_in_tests)} components excluded from grouping (each needs individual build):"
            )
            # Group by reason to show summary
            direct_bus = [
                c
                for c in excluded_in_tests
                if "NEEDS MIGRATION" in non_groupable_reasons.get(c, "")
            ]
            if direct_bus:
                print(
                    f"\n  ⚠⚠⚠ {len(direct_bus)} DEFINE BUSES DIRECTLY - NEED MIGRATION TO PACKAGES:"
                )
                for comp in sorted(direct_bus):
                    print(f"    - {comp}")

            other_reasons = [
                c
                for c in excluded_in_tests
                if "NEEDS MIGRATION" not in non_groupable_reasons.get(c, "")
            ]
            if other_reasons and len(other_reasons) <= 10:
                print("\n  Other non-groupable components:")
                for comp in sorted(other_reasons):
                    reason = non_groupable_reasons[comp]
                    print(f"    - {comp}: {reason}")
            elif other_reasons:
                print(
                    f"\n  Other non-groupable components: {len(other_reasons)} components"
                )

    # Distribute no_buses components into other groups to maximize efficiency
    # Components with no buses can merge with any bus group since they have no conflicting requirements
    no_buses_by_platform: dict[str, list[str]] = {}
    for (platform, signature), components in list(grouped_components.items()):
        if signature == NO_BUSES_SIGNATURE:
            no_buses_by_platform[platform] = components
            # Remove from grouped_components - we'll distribute them
            del grouped_components[(platform, signature)]

    # Distribute no_buses components into existing groups for each platform
    for platform, no_buses_comps in no_buses_by_platform.items():
        # Find all non-empty groups for this platform (excluding no_buses)
        platform_groups = [
            (sig, comps)
            for (plat, sig), comps in grouped_components.items()
            if plat == platform and sig != NO_BUSES_SIGNATURE
        ]

        if platform_groups:
            # Distribute no_buses components round-robin across existing groups
            for i, comp in enumerate(no_buses_comps):
                sig, _ = platform_groups[i % len(platform_groups)]
                grouped_components[(platform, sig)].append(comp)
        else:
            # No other groups for this platform - keep no_buses components together
            grouped_components[(platform, NO_BUSES_SIGNATURE)] = no_buses_comps

    # Split groups that exceed platform-specific maximum sizes
    # ESP8266 has limited IRAM and can't handle large component groups
    split_groups = {}
    for (platform, signature), components in list(grouped_components.items()):
        max_size = PLATFORM_MAX_GROUP_SIZE.get(platform)
        if max_size and len(components) > max_size:
            # Split this group into smaller groups
            print(
                f"\n  ℹ️ Splitting {platform} group (signature: {signature}) "
                f"from {len(components)} to max {max_size} components per group"
            )
            # Remove original group
            del grouped_components[(platform, signature)]
            # Create split groups
            for i in range(0, len(components), max_size):
                split_components = components[i : i + max_size]
                # Create unique signature for each split group
                split_signature = f"{signature}_split{i // max_size + 1}"
                split_groups[(platform, split_signature)] = split_components
    # Add split groups back
    grouped_components.update(split_groups)

    groups_to_test = []
    individual_tests = set()  # Use set to avoid duplicates

    for (platform, signature), components in sorted(grouped_components.items()):
        if len(components) > 1:
            groups_to_test.append((platform, signature, components))
        # Note: Don't add single-component groups to individual_tests here
        # They'll be added below when we check for ungrouped components

    # Add components that weren't grouped on any platform
    for component in all_tests:
        if component not in [c for _, _, comps in groups_to_test for c in comps]:
            individual_tests.add(component)

    if groups_to_test:
        print(f"\n✓ {len(groups_to_test)} groups will be tested together:")
        for platform, signature, components in groups_to_test:
            component_list = ", ".join(sorted(components))
            print(f"  [{platform}] [{signature}]: {component_list}")
            print(
                f"    → {len(components)} components in 1 build (saves {len(components) - 1} builds)"
            )

    if individual_tests:
        print(f"\n○ {len(individual_tests)} components will be tested individually:")
        sorted_individual = sorted(individual_tests)
        for comp in sorted_individual[:10]:
            print(f"  - {comp}")
        if len(individual_tests) > 10:
            print(f"  ... and {len(individual_tests) - 10} more")

    # Calculate actual build counts based on test files, not component counts
    # Without grouping: every test file would be built separately
    total_test_files = sum(len(test_files) for test_files in all_tests.values())

    # With grouping:
    # - 1 build per group (regardless of how many components)
    # - Individual components still need all their platform builds
    individual_test_file_count = sum(
        len(all_tests[comp]) for comp in individual_tests if comp in all_tests
    )

    total_grouped_components = sum(len(comps) for _, _, comps in groups_to_test)
    total_builds_with_grouping = len(groups_to_test) + individual_test_file_count
    builds_saved = total_test_files - total_builds_with_grouping

    print(f"\n{'=' * 80}")
    print(
        f"Summary: {total_builds_with_grouping} builds total (vs {total_test_files} without grouping)"
    )
    print(
        f"  • {len(groups_to_test)} grouped builds ({total_grouped_components} components)"
    )
    print(
        f"  • {individual_test_file_count} individual builds ({len(individual_tests)} components)"
    )
    if total_test_files > 0:
        reduction_pct = (builds_saved / total_test_files) * 100
        print(f"  • Saves {builds_saved} builds ({reduction_pct:.1f}% reduction)")
    print("=" * 80 + "\n")

    # Execute grouped tests
    for (platform, signature), components in grouped_components.items():
        # Only group if we have multiple components with same signature
        if len(components) <= 1:
            continue

        # Filter out components not in our test list
        components_to_group = [c for c in components if c in all_tests]
        if len(components_to_group) <= 1:
            continue

        # Get platform base files
        if platform not in platform_bases:
            continue

        for base_file in platform_bases[platform]:
            platform_with_version = extract_platform_with_version(base_file)

            # Skip if platform filter doesn't match
            if platform_filter and platform != platform_filter:
                continue
            if (
                platform_filter
                and platform_with_version != platform_filter
                and not platform_with_version.startswith(f"{platform_filter}-")
            ):
                continue

            # Run grouped test
            success, cmd_str = run_grouped_test(
                components=components_to_group,
                platform=platform,
                platform_with_version=platform_with_version,
                base_file=base_file,
                build_dir=build_dir,
                tests_dir=tests_dir,
                esphome_command=esphome_command,
                continue_on_fail=continue_on_fail,
            )

            # Mark all components as tested
            for comp in components_to_group:
                tested_components.add((comp, platform_with_version))

            # Record result for each component - show all components in grouped tests
            test_id = (
                f"GROUPED[{','.join(components_to_group)}].{platform_with_version}"
            )
            if success:
                passed_tests.append(test_id)
            else:
                failed_tests.append(test_id)
                failed_commands[test_id] = cmd_str

    return tested_components, passed_tests, failed_tests, failed_commands

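# --- Illustrative sketch (not part of the diff): round-robin distribution ---
# no_buses components are spread evenly over a platform's existing bus groups
# via the i % len(groups) index. Group and component names are hypothetical:
#
#     groups = {"i2c_sig": [], "spi_sig": []}
#     names = list(groups)
#     for i, comp in enumerate(["a", "b", "c"]):
#         groups[names[i % len(names)]].append(comp)
#     assert groups == {"i2c_sig": ["a", "c"], "spi_sig": ["b"]}
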
def run_individual_component_test(
    component: str,
    test_file: Path,
    platform: str,
    platform_with_version: str,
    base_file: Path,
    build_dir: Path,
    esphome_command: str,
    continue_on_fail: bool,
    tested_components: set[tuple[str, str]],
    passed_tests: list[str],
    failed_tests: list[str],
    failed_commands: dict[str, str],
) -> None:
    """Run an individual component test if not already tested in a group.

    Args:
        component: Component name
        test_file: Test file path
        platform: Platform name
        platform_with_version: Platform with version
        base_file: Base file for platform
        build_dir: Build directory
        esphome_command: ESPHome command
        continue_on_fail: Whether to continue on failure
        tested_components: Set of already tested components
        passed_tests: List to append passed test IDs
        failed_tests: List to append failed test IDs
        failed_commands: Dict to store failed test commands
    """
    # Skip if already tested in a group
    if (component, platform_with_version) in tested_components:
        return

    test_name = test_file.stem.split(".")[0]
    success, cmd_str = run_esphome_test(
        component=component,
        test_file=test_file,
        platform=platform,
        platform_with_version=platform_with_version,
        base_file=base_file,
        build_dir=build_dir,
        esphome_command=esphome_command,
        continue_on_fail=continue_on_fail,
    )
    test_id = f"{component}.{test_name}.{platform_with_version}"
    if success:
        passed_tests.append(test_id)
    else:
        failed_tests.append(test_id)
        failed_commands[test_id] = cmd_str

def test_components(
    component_patterns: list[str],
    platform_filter: str | None,
    esphome_command: str,
    continue_on_fail: bool,
    enable_grouping: bool = True,
) -> int:
    """Test components with optional intelligent grouping.

    Args:
        component_patterns: List of component name patterns
        platform_filter: Optional platform to filter by
        esphome_command: ESPHome command (config/compile)
        continue_on_fail: Whether to continue on failure
        enable_grouping: Whether to enable component grouping

    Returns:
        Exit code (0 for success, 1 for failure)
    """
    # Setup paths
    repo_root = Path(__file__).parent.parent
    tests_dir = repo_root / "tests" / "components"
    build_components_dir = repo_root / "tests" / "test_build_components"
    build_dir = build_components_dir / "build"
    build_dir.mkdir(parents=True, exist_ok=True)

    # Get platform base files
    platform_bases = get_platform_base_files(build_components_dir)

    # Find all component tests
    all_tests = {}
    for pattern in component_patterns:
        all_tests.update(find_component_tests(tests_dir, pattern))

    if not all_tests:
        print(f"No components found matching: {component_patterns}")
        return 1

    print(f"Found {len(all_tests)} components to test")

    # Run tests
    failed_tests = []
    passed_tests = []
    tested_components = set()  # Track which components were tested in groups
    failed_commands = {}  # Track commands for failed tests

    # First, run grouped tests if grouping is enabled
    if enable_grouping:
        (
            tested_components,
            passed_tests,
            failed_tests,
            failed_commands,
        ) = run_grouped_component_tests(
            all_tests=all_tests,
            platform_filter=platform_filter,
            platform_bases=platform_bases,
            tests_dir=tests_dir,
            build_dir=build_dir,
            esphome_command=esphome_command,
            continue_on_fail=continue_on_fail,
        )

    # Then run individual tests for components not in groups
    for component, test_files in sorted(all_tests.items()):
        for test_file in test_files:
            test_name, platform = parse_test_filename(test_file)

            # Handle "all" platform tests
            if platform == "all":
                # Run for all platforms
                for plat, base_files in platform_bases.items():
                    if platform_filter and plat != platform_filter:
                        continue

                    for base_file in base_files:
                        platform_with_version = extract_platform_with_version(base_file)
                        run_individual_component_test(
                            component=component,
                            test_file=test_file,
                            platform=plat,
                            platform_with_version=platform_with_version,
                            base_file=base_file,
                            build_dir=build_dir,
                            esphome_command=esphome_command,
                            continue_on_fail=continue_on_fail,
                            tested_components=tested_components,
                            passed_tests=passed_tests,
                            failed_tests=failed_tests,
                            failed_commands=failed_commands,
                        )
            else:
                # Platform-specific test
                if platform_filter and platform != platform_filter:
                    continue

                if platform not in platform_bases:
                    print(f"No base file for platform: {platform}")
                    continue

                for base_file in platform_bases[platform]:
                    platform_with_version = extract_platform_with_version(base_file)

                    # Skip if requested platform doesn't match
                    if (
                        platform_filter
                        and platform_with_version != platform_filter
                        and not platform_with_version.startswith(f"{platform_filter}-")
                    ):
                        continue

                    run_individual_component_test(
                        component=component,
                        test_file=test_file,
                        platform=platform,
                        platform_with_version=platform_with_version,
                        base_file=base_file,
                        build_dir=build_dir,
                        esphome_command=esphome_command,
                        continue_on_fail=continue_on_fail,
                        tested_components=tested_components,
                        passed_tests=passed_tests,
                        failed_tests=failed_tests,
                        failed_commands=failed_commands,
                    )

    # Print summary
    print("\n" + "=" * 80)
    print(f"Test Summary: {len(passed_tests)} passed, {len(failed_tests)} failed")
    print("=" * 80)

    if failed_tests:
        print("\nFailed tests:")
        for test in failed_tests:
            print(f"  - {test}")

        # Print failed commands at the end for easy copy-paste from CI logs
        print("\n" + "=" * 80)
        print("Failed test commands (copy-paste to reproduce locally):")
        print("=" * 80)
        for test in failed_tests:
            if test in failed_commands:
                print(f"\n# {test}")
                print(failed_commands[test])
                print()

        return 1

    return 0

def main() -> int:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Test ESPHome component builds with intelligent grouping"
    )
    parser.add_argument(
        "-e",
        "--esphome-command",
        default="compile",
        choices=["config", "compile", "clean"],
        help="ESPHome command to run (default: compile)",
    )
    parser.add_argument(
        "-c",
        "--components",
        default="*",
        help="Component pattern(s) to test (default: *). Comma-separated.",
    )
    parser.add_argument(
        "-t",
        "--target",
        help="Target platform to test (e.g., esp32-idf)",
    )
    parser.add_argument(
        "-f",
        "--continue-on-fail",
        action="store_true",
        help="Continue testing even if a test fails",
    )
    parser.add_argument(
        "--no-grouping",
        action="store_true",
        help="Disable component grouping (test each component individually)",
    )

    args = parser.parse_args()

    # Parse component patterns
    component_patterns = [p.strip() for p in args.components.split(",")]

    return test_components(
        component_patterns=component_patterns,
        platform_filter=args.target,
        esphome_command=args.esphome_command,
        continue_on_fail=args.continue_on_fail,
        enable_grouping=not args.no_grouping,
    )


if __name__ == "__main__":
    sys.exit(main())
227
script/test_component_grouping.py
Executable file
@@ -0,0 +1,227 @@
#!/usr/bin/env python3
"""Test component grouping by finding and testing groups of components.

This script analyzes components, finds groups that can be tested together,
and runs test builds for those groups.
"""

from __future__ import annotations

import argparse
from pathlib import Path
import subprocess
import sys

# Add esphome to path
sys.path.insert(0, str(Path(__file__).parent.parent))

from script.analyze_component_buses import (
    analyze_all_components,
    group_components_by_signature,
)

def test_component_group(
|
||||
components: list[str],
|
||||
platform: str,
|
||||
esphome_command: str = "compile",
|
||||
dry_run: bool = False,
|
||||
) -> bool:
|
||||
"""Test a group of components together.
|
||||
|
||||
Args:
|
||||
components: List of component names to test together
|
||||
platform: Platform to test on (e.g., "esp32-idf")
|
||||
esphome_command: ESPHome command to run (config/compile/clean)
|
||||
dry_run: If True, only print the command without running it
|
||||
|
||||
Returns:
|
||||
True if test passed, False otherwise
|
||||
"""
|
||||
components_str = ",".join(components)
|
||||
cmd = [
|
||||
"./script/test_build_components",
|
||||
"-c",
|
||||
components_str,
|
||||
"-t",
|
||||
platform,
|
||||
"-e",
|
||||
esphome_command,
|
||||
]
|
||||
|
||||
print(f"\n{'=' * 80}")
|
||||
print(f"Testing {len(components)} components on {platform}:")
|
||||
for comp in components:
|
||||
print(f" - {comp}")
|
||||
print(f"{'=' * 80}")
|
||||
print(f"Command: {' '.join(cmd)}\n")
|
||||
|
||||
if dry_run:
|
||||
print("[DRY RUN] Skipping actual test")
|
||||
return True
|
||||
|
||||
try:
|
||||
result = subprocess.run(cmd, check=False)
|
||||
return result.returncode == 0
|
||||
except Exception as e:
|
||||
print(f"Error running test: {e}")
|
||||
return False
|
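
# Example (illustrative): test_component_group(["a01nyub", "a02yyuw"], "esp32-idf", "config")
# runs: ./script/test_build_components -c a01nyub,a02yyuw -t esp32-idf -e config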


def main() -> None:
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Test component grouping by finding and testing groups"
    )
    parser.add_argument(
        "--platform",
        "-p",
        default="esp32-idf",
        help="Platform to test (default: esp32-idf)",
    )
    parser.add_argument(
        "-e",
        "--esphome-command",
        default="compile",
        choices=["config", "compile", "clean"],
        help="ESPHome command to run (default: compile)",
    )
    parser.add_argument(
        "--all",
        action="store_true",
        help="Test all components together in a single build (ignores group size/count limits)",
    )
    parser.add_argument(
        "--min-size",
        type=int,
        default=3,
        help="Minimum group size to test (default: 3)",
    )
    parser.add_argument(
        "--max-size",
        type=int,
        default=10,
        help="Maximum group size to test (default: 10)",
    )
    parser.add_argument(
        "--max-groups",
        type=int,
        default=5,
        help="Maximum number of groups to test (default: 5)",
    )
    parser.add_argument(
        "--signature",
        "-s",
        help="Only test groups with this bus signature (e.g., 'spi', 'i2c', 'uart')",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Print commands without running them",
    )

    args = parser.parse_args()

    # If --all is specified, test all components without grouping
    if args.all:
        # Get all components from tests/components directory
        components_dir = Path("tests/components")
        all_components = sorted(
            d.name for d in components_dir.iterdir() if d.is_dir()
        )

        if not all_components:
            print(f"\nNo components found in {components_dir}")
            return

        print(f"\nTesting all {len(all_components)} components together")

        success = test_component_group(
            all_components, args.platform, args.esphome_command, args.dry_run
        )

        # Print summary
        print(f"\n{'=' * 80}")
        print("TEST SUMMARY")
        print(f"{'=' * 80}")
        status = "✅ PASS" if success else "❌ FAIL"
        print(f"{status} All components: {len(all_components)} components")

        if not args.dry_run and not success:
            sys.exit(1)
        return

    print("Analyzing all components...")
    components, non_groupable, _ = analyze_all_components(Path("tests/components"))

    print(f"Found {len(components)} components, {len(non_groupable)} non-groupable")

    # Group components by signature for the platform
    groups = group_components_by_signature(components, args.platform)
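    # A signature encodes which shared buses a component's test config uses
    # (e.g. "uart"), so components with the same signature can share one build.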

    # Filter and sort groups
    filtered_groups = []
    for signature, comp_list in groups.items():
        # Filter by signature if specified
        if args.signature and signature != args.signature:
            continue

        # Remove non-groupable components
        comp_list = [c for c in comp_list if c not in non_groupable]

        # Filter by minimum size
        if len(comp_list) < args.min_size:
            continue

        # If group is larger than max_size, we'll take a subset later
        filtered_groups.append((signature, comp_list))

    # Sort by group size (largest first)
    filtered_groups.sort(key=lambda x: len(x[1]), reverse=True)

    # Limit number of groups
    filtered_groups = filtered_groups[: args.max_groups]

    if not filtered_groups:
        print("\nNo groups found matching criteria:")
        print(f"  - Platform: {args.platform}")
        print(f"  - Size: {args.min_size}-{args.max_size}")
        if args.signature:
            print(f"  - Signature: {args.signature}")
        return

    print(f"\nFound {len(filtered_groups)} groups to test:")
    for signature, comp_list in filtered_groups:
        print(f"  [{signature}]: {len(comp_list)} components")

    # Test each group
    results = []
    for signature, comp_list in filtered_groups:
        # Limit to max_size if group is larger
        if len(comp_list) > args.max_size:
            comp_list = comp_list[: args.max_size]

        success = test_component_group(
            comp_list, args.platform, args.esphome_command, args.dry_run
        )
        results.append((signature, comp_list, success))

        if not args.dry_run and not success:
            print(f"\n❌ FAILED: {signature} group")
            break

    # Print summary
    print(f"\n{'=' * 80}")
    print("TEST SUMMARY")
    print(f"{'=' * 80}")
    for signature, comp_list, success in results:
        status = "✅ PASS" if success else "❌ FAIL"
        print(f"{status} [{signature}]: {len(comp_list)} components")

    # Exit with error if any tests failed
    if not args.dry_run and any(not success for _, _, success in results):
        sys.exit(1)


if __name__ == "__main__":
    main()
@@ -1,11 +1,4 @@
-uart:
-  - id: uart_a01nyub
-    tx_pin: ${tx_pin}
-    rx_pin: ${rx_pin}
-    baud_rate: 9600
-
 sensor:
   - platform: a01nyub
     id: a01nyub_sensor
     name: a01nyub Distance
-    uart_id: uart_a01nyub
@@ -1,5 +0,0 @@
-substitutions:
-  tx_pin: GPIO17
-  rx_pin: GPIO16
-
-<<: !include common.yaml
@@ -1,5 +0,0 @@
-substitutions:
-  tx_pin: GPIO4
-  rx_pin: GPIO5
-
-<<: !include common.yaml
@@ -1,3 +1,6 @@
+packages:
+  uart: !include ../../test_build_components/common/uart/esp32-c3-idf.yaml
+
 substitutions:
   tx_pin: GPIO4
   rx_pin: GPIO5
@@ -1,5 +1,8 @@
 substitutions:
-  tx_pin: GPIO17
-  rx_pin: GPIO16
+  tx_pin: GPIO4
+  rx_pin: GPIO5
+
+packages:
+  uart: !include ../../test_build_components/common/uart/esp32-idf.yaml

 <<: !include common.yaml
@@ -1,5 +1,4 @@
-substitutions:
-  tx_pin: GPIO4
-  rx_pin: GPIO5
+packages:
+  uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml

 <<: !include common.yaml
@@ -1,5 +1,4 @@
-substitutions:
-  tx_pin: GPIO4
-  rx_pin: GPIO5
+packages:
+  uart: !include ../../test_build_components/common/uart/rp2040-ard.yaml

 <<: !include common.yaml
@@ -1,11 +1,4 @@
-uart:
-  - id: uart_a02yyuw
-    tx_pin: ${tx_pin}
-    rx_pin: ${rx_pin}
-    baud_rate: 9600
-
 sensor:
   - platform: a02yyuw
     id: a02yyuw_sensor
     name: a02yyuw Distance
-    uart_id: uart_a02yyuw
@@ -1,5 +0,0 @@
-substitutions:
-  tx_pin: GPIO17
-  rx_pin: GPIO16
-
-<<: !include common.yaml
@@ -1,5 +0,0 @@
-substitutions:
-  tx_pin: GPIO4
-  rx_pin: GPIO5
-
-<<: !include common.yaml
@@ -1,3 +1,6 @@
+packages:
+  uart: !include ../../test_build_components/common/uart/esp32-c3-idf.yaml
+
 substitutions:
   tx_pin: GPIO4
   rx_pin: GPIO5
@@ -1,5 +1,8 @@
 substitutions:
-  tx_pin: GPIO17
-  rx_pin: GPIO16
+  tx_pin: GPIO4
+  rx_pin: GPIO5
+
+packages:
+  uart: !include ../../test_build_components/common/uart/esp32-idf.yaml

 <<: !include common.yaml
@@ -1,5 +1,4 @@
-substitutions:
-  tx_pin: GPIO4
-  rx_pin: GPIO5
+packages:
+  uart: !include ../../test_build_components/common/uart/esp8266-ard.yaml

 <<: !include common.yaml
@@ -1,5 +1,4 @@
-substitutions:
-  tx_pin: GPIO4
-  rx_pin: GPIO5
+packages:
+  uart: !include ../../test_build_components/common/uart/rp2040-ard.yaml

 <<: !include common.yaml
@@ -1,6 +0,0 @@
-substitutions:
-  step_pin: GPIO22
-  dir_pin: GPIO23
-  sleep_pin: GPIO25
-
-<<: !include common.yaml
Some files were not shown because too many files have changed in this diff.