Mirror of https://github.com/esphome/esphome.git (synced 2025-10-24 02:58:43 +00:00)

Compare commits: fix_clang_... → light-addr (47 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 902680a2e0 | |
| | e1c851cab8 | |
| | 146b067d62 | |
| | 5b15827009 | |
| | 0de79ba291 | |
| | e3aaf6a144 | |
| | 78ffeb30fb | |
| | 1b3cbb9f60 | |
| | e3ecbf6d65 | |
| | 603e3d94c7 | |
| | 98f691913f | |
| | a89a35bff3 | |
| | 2c1927fd12 | |
| | c6ae1a5909 | |
| | e9e306501a | |
| | 9c712744be | |
| | ae50a09b4e | |
| | 1ea80594c6 | |
| | 8500323d39 | |
| | 6f7db2f5f7 | |
| | 9922c65912 | |
| | f2469077d9 | |
| | 742eca92d8 | |
| | 548913b471 | |
| | a05c5ea240 | |
| | 8e8a2bde95 | |
| | 80265a6bd2 | |
| | 87e9a7a1bd | |
| | 3aedfe8be3 | |
| | 7f2cc47ed6 | |
| | a5542e0d2b | |
| | 66afe4a9be | |
| | 0ae9009e41 | |
| | 0b2f5fcd7e | |
| | 7a2887e2ed | |
| | cd2d3f061d | |
| | 73f5d01c2d | |
| | 0938609f7a | |
| | 77203f0cb4 | |
| | 040130e357 | |
| | 85959e3004 | |
| | a809a13729 | |
| | 3b6ff615e8 | |
| | 05216db5f0 | |
| | 9f668b0c4b | |
| | 2aa3bceed8 | |
| | bdfa84ed87 | |
@@ -1 +1 @@
-d7693a1e996cacd4a3d1c9a16336799c2a8cc3db02e4e74084151ce964581248
+3d46b63015d761c85ca9cb77ab79a389509e5776701fb22aed16e7b79d432c0c
.github/workflows/auto-label-pr.yml (vendored) — 7 changes
@@ -53,6 +53,7 @@ jobs:
              'new-target-platform',
              'merging-to-release',
              'merging-to-beta',
              'chained-pr',
              'core',
              'small-pr',
              'dashboard',

@@ -140,6 +141,8 @@ jobs:
              labels.add('merging-to-release');
            } else if (baseRef === 'beta') {
              labels.add('merging-to-beta');
            } else if (baseRef !== 'dev') {
              labels.add('chained-pr');
            }

            return labels;

@@ -528,8 +531,8 @@ jobs:
            const apiData = await fetchApiData();
            const baseRef = context.payload.pull_request.base.ref;

            // Early exit for non-dev branches
            if (baseRef !== 'dev') {
            // Early exit for release and beta branches only
            if (baseRef === 'release' || baseRef === 'beta') {
              const branchLabels = await detectMergeBranch();
              const finalLabels = Array.from(branchLabels);
.github/workflows/ci.yml (vendored) — 31 changes
@@ -178,6 +178,8 @@ jobs:
      component-test-count: ${{ steps.determine.outputs.component-test-count }}
      changed-cpp-file-count: ${{ steps.determine.outputs.changed-cpp-file-count }}
      memory_impact: ${{ steps.determine.outputs.memory-impact }}
      cpp-unit-tests-run-all: ${{ steps.determine.outputs.cpp-unit-tests-run-all }}
      cpp-unit-tests-components: ${{ steps.determine.outputs.cpp-unit-tests-components }}
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

@@ -210,6 +212,8 @@ jobs:
          echo "component-test-count=$(echo "$output" | jq -r '.component_test_count')" >> $GITHUB_OUTPUT
          echo "changed-cpp-file-count=$(echo "$output" | jq -r '.changed_cpp_file_count')" >> $GITHUB_OUTPUT
          echo "memory-impact=$(echo "$output" | jq -c '.memory_impact')" >> $GITHUB_OUTPUT
          echo "cpp-unit-tests-run-all=$(echo "$output" | jq -r '.cpp_unit_tests_run_all')" >> $GITHUB_OUTPUT
          echo "cpp-unit-tests-components=$(echo "$output" | jq -c '.cpp_unit_tests_components')" >> $GITHUB_OUTPUT

  integration-tests:
    name: Run integration tests

@@ -247,6 +251,33 @@ jobs:
          . venv/bin/activate
          pytest -vv --no-cov --tb=native -n auto tests/integration/

  cpp-unit-tests:
    name: Run C++ unit tests
    runs-on: ubuntu-24.04
    needs:
      - common
      - determine-jobs
    if: github.event_name == 'pull_request' && (needs.determine-jobs.outputs.cpp-unit-tests-run-all == 'true' || needs.determine-jobs.outputs.cpp-unit-tests-components != '[]')
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Restore Python
        uses: ./.github/actions/restore-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          cache-key: ${{ needs.common.outputs.cache-key }}

      - name: Run cpp_unit_test.py
        run: |
          . venv/bin/activate
          if [ "${{ needs.determine-jobs.outputs.cpp-unit-tests-run-all }}" = "true" ]; then
            script/cpp_unit_test.py --all
          else
            ARGS=$(echo '${{ needs.determine-jobs.outputs.cpp-unit-tests-components }}' | jq -r '.[] | @sh' | xargs)
            script/cpp_unit_test.py $ARGS
          fi

  clang-tidy-single:
    name: ${{ matrix.name }}
    runs-on: ubuntu-24.04
.github/workflows/status-check-labels.yml (vendored) — 1 change
@@ -14,6 +14,7 @@ jobs:
        label:
          - needs-docs
          - merge-after-release
          - chained-pr
    steps:
      - name: Check for ${{ matrix.label }} label
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
||||
@@ -231,9 +231,22 @@ class MemoryAnalyzerCLI(MemoryAnalyzer):
|
||||
api_component = (name, mem)
|
||||
break
|
||||
|
||||
# Combine all components to analyze: top ESPHome + all external + API if not already included
|
||||
components_to_analyze = list(top_esphome_components) + list(
|
||||
top_external_components
|
||||
# Also include wifi_stack and other important system components if they exist
|
||||
system_components_to_include = [
|
||||
# Empty list - we've finished debugging symbol categorization
|
||||
# Add component names here if you need to debug their symbols
|
||||
]
|
||||
system_components = [
|
||||
(name, mem)
|
||||
for name, mem in components
|
||||
if name in system_components_to_include
|
||||
]
|
||||
|
||||
# Combine all components to analyze: top ESPHome + all external + API if not already included + system components
|
||||
components_to_analyze = (
|
||||
list(top_esphome_components)
|
||||
+ list(top_external_components)
|
||||
+ system_components
|
||||
)
|
||||
if api_component and api_component not in components_to_analyze:
|
||||
components_to_analyze.append(api_component)
|
||||
|
||||
@@ -127,40 +127,39 @@ SYMBOL_PATTERNS = {
        "tryget_socket_unconn",
        "cs_create_ctrl_sock",
        "netbuf_alloc",
        "tcp_",  # TCP protocol functions
        "udp_",  # UDP protocol functions
        "lwip_",  # LwIP stack functions
        "eagle_lwip",  # ESP-specific LwIP functions
        "new_linkoutput",  # Link output function
        "acd_",  # Address Conflict Detection (ACD)
        "eth_",  # Ethernet functions
        "mac_enable_bb",  # MAC baseband enable
        "reassemble_and_dispatch",  # Packet reassembly
    ],
    # dhcp must come before libc to avoid "dhcp_select" matching "select" pattern
    "dhcp": ["dhcp", "handle_dhcp"],
    "ipv6_stack": ["nd6_", "ip6_", "mld6_", "icmp6_", "icmp6_input"],
    "wifi_stack": [
        "ieee80211",
        "hostap",
        "sta_",
        "ap_",
        "scan_",
        "wifi_",
        "wpa_",
        "wps_",
        "esp_wifi",
        "cnx_",
        "wpa3_",
        "sae_",
        "wDev_",
        "ic_",
        "mac_",
        "esf_buf",
        "gWpaSm",
        "sm_WPA",
        "eapol_",
        "owe_",
        "wifiLowLevelInit",
        "s_do_mapping",
        "gScanStruct",
        "ppSearchTxframe",
        "ppMapWaitTxq",
        "ppFillAMPDUBar",
        "ppCheckTxConnTrafficIdle",
        "ppCalTkipMic",
    # Order matters! More specific categories must come before general ones.
    # mdns must come before bluetooth to avoid "_mdns_disable_pcb" matching "ble_" pattern
    "mdns_lib": ["mdns"],
    # memory_mgmt must come before wifi_stack to catch mmu_hal_* symbols
    "memory_mgmt": [
        "mem_",
        "memory_",
        "tlsf_",
        "memp_",
        "pbuf_",
        "pbuf_alloc",
        "pbuf_copy_partial_pbuf",
        "esp_mmu_map",
        "mmu_hal_",
        "s_do_mapping",  # Memory mapping function, not WiFi
        "hash_map_",  # Hash map data structure
        "umm_assimilate",  # UMM malloc assimilation
    ],
    "bluetooth": ["bt_", "ble_", "l2c_", "gatt_", "gap_", "hci_", "BT_init"],
    "wifi_bt_coex": ["coex"],
    # Bluetooth categories must come BEFORE wifi_stack to avoid misclassification
    # Many BLE symbols contain patterns like "ble_" that would otherwise match wifi patterns
    "bluetooth_rom": ["r_ble", "r_lld", "r_llc", "r_llm"],
    "bluedroid_bt": [
        "bluedroid",

@@ -207,6 +206,61 @@ SYMBOL_PATTERNS = {
        "copy_extra_byte_in_db",
        "parse_read_local_supported_commands_response",
    ],
    "bluetooth": [
        "bt_",
        "_ble_",  # More specific than "ble_" to avoid matching "able_", "enable_", "disable_"
        "l2c_",
        "l2ble_",  # L2CAP for BLE
        "gatt_",
        "gap_",
        "hci_",
        "btsnd_hcic_",  # Bluetooth HCI command send functions
        "BT_init",
        "BT_tx_",  # Bluetooth transmit functions
        "esp_ble_",  # Catch esp_ble_* functions
    ],
    "bluetooth_ll": [
        "llm_",  # Link layer manager
        "llc_",  # Link layer control
        "lld_",  # Link layer driver
        "ld_acl_",  # Link layer ACL (Asynchronous Connection-Oriented)
        "llcp_",  # Link layer control protocol
        "lmp_",  # Link manager protocol
    ],
    "wifi_bt_coex": ["coex"],
    "wifi_stack": [
        "ieee80211",
        "hostap",
        "sta_",
        "wifi_ap_",  # More specific than "ap_" to avoid matching "cap_", "map_"
        "wifi_scan_",  # More specific than "scan_" to avoid matching "_scan_" in other contexts
        "wifi_",
        "wpa_",
        "wps_",
        "esp_wifi",
        "cnx_",
        "wpa3_",
        "sae_",
        "wDev_",
        "ic_mac_",  # More specific than "mac_" to avoid matching emac_
        "esf_buf",
        "gWpaSm",
        "sm_WPA",
        "eapol_",
        "owe_",
        "wifiLowLevelInit",
        # Removed "s_do_mapping" - this is memory management, not WiFi
        "gScanStruct",
        "ppSearchTxframe",
        "ppMapWaitTxq",
        "ppFillAMPDUBar",
        "ppCheckTxConnTrafficIdle",
        "ppCalTkipMic",
        "phy_force_wifi",
        "phy_unforce_wifi",
        "write_wifi_chan",
        "wifi_track_pll",
    ],
    "crypto_math": [
        "ecp_",
        "bignum_",

@@ -231,13 +285,36 @@ SYMBOL_PATTERNS = {
        "p_256_init_curve",
        "shift_sub_rows",
        "rshift",
        "rijndaelEncrypt",  # AES Rijndael encryption
    ],
    # System and Arduino core functions must come before libc
    "esp_system": [
        "system_",  # ESP system functions
        "postmortem_",  # Postmortem reporting
    ],
    "arduino_core": [
        "pinMode",
        "resetPins",
        "millis",
        "micros",
        "delay(",  # More specific - Arduino delay function with parenthesis
        "delayMicroseconds",
        "digitalWrite",
        "digitalRead",
    ],
    "sntp": ["sntp_", "sntp_recv"],
    "scheduler": [
        "run_scheduled_",
        "compute_scheduled_",
        "event_TaskQueue",
    ],
    "hw_crypto": ["esp_aes", "esp_sha", "esp_rsa", "esp_bignum", "esp_mpi"],
    "libc": [
        "printf",
        "scanf",
        "malloc",
        "free",
        "_free",  # More specific than "free" to match _free, __free_r, etc. but not arbitrary "free" substring
        "umm_free",  # UMM malloc free function
        "memcpy",
        "memset",
        "strcpy",

@@ -259,7 +336,7 @@ SYMBOL_PATTERNS = {
        "_setenv_r",
        "_tzset_unlocked_r",
        "__tzcalc_limits",
        "select",
        "_select",  # More specific than "select" to avoid matching "dhcp_select", etc.
        "scalbnf",
        "strtof",
        "strtof_l",

@@ -316,8 +393,24 @@ SYMBOL_PATTERNS = {
        "CSWTCH$",
        "dst$",
        "sulp",
        "_strtol_l",  # String to long with locale
        "__cvt",  # Convert
        "__utoa",  # Unsigned to ASCII
        "__global_locale",  # Global locale
        "_ctype_",  # Character type
        "impure_data",  # Impure data
    ],
    "string_ops": [
        "strcmp",
        "strncmp",
        "strchr",
        "strstr",
        "strtok",
        "strdup",
        "strncasecmp_P",  # String compare (case insensitive, from program memory)
        "strnlen_P",  # String length (from program memory)
        "strncat_P",  # String concatenate (from program memory)
    ],
    "string_ops": ["strcmp", "strncmp", "strchr", "strstr", "strtok", "strdup"],
    "memory_alloc": ["malloc", "calloc", "realloc", "free", "_sbrk"],
    "file_io": [
        "fread",

@@ -338,10 +431,26 @@ SYMBOL_PATTERNS = {
        "vsscanf",
    ],
    "cpp_anonymous": ["_GLOBAL__N_", "n$"],
    "cpp_runtime": ["__cxx", "_ZN", "_ZL", "_ZSt", "__gxx_personality", "_Z16"],
    "exception_handling": ["__cxa_", "_Unwind_", "__gcc_personality", "uw_frame_state"],
    # Plain C patterns only - C++ symbols will be categorized via DEMANGLED_PATTERNS
    "nvs": ["nvs_"],  # Plain C NVS functions
    "ota": ["ota_", "OTA", "esp_ota", "app_desc"],
    # cpp_runtime: Removed _ZN, _ZL to let DEMANGLED_PATTERNS categorize C++ symbols properly
    # Only keep patterns that are truly runtime-specific and not categorizable by namespace
    "cpp_runtime": ["__cxx", "_ZSt", "__gxx_personality", "_Z16"],
    "exception_handling": [
        "__cxa_",
        "_Unwind_",
        "__gcc_personality",
        "uw_frame_state",
        "search_object",  # Search for exception handling object
        "get_cie_encoding",  # Get CIE encoding
        "add_fdes",  # Add frame description entries
        "fde_unencoded_compare",  # Compare FDEs
        "fde_mixed_encoding_compare",  # Compare mixed encoding FDEs
        "frame_downheap",  # Frame heap operations
        "frame_heapsort",  # Frame heap sorting
    ],
    "static_init": ["_GLOBAL__sub_I_"],
    "mdns_lib": ["mdns"],
    "phy_radio": [
        "phy_",
        "rf_",

@@ -394,10 +503,47 @@ SYMBOL_PATTERNS = {
        "txcal_debuge_mode",
        "ant_wifitx_cfg",
        "reg_init_begin",
        "tx_cap_init",  # TX capacitance init
        "ram_set_txcap",  # RAM TX capacitance setting
        "tx_atten_",  # TX attenuation
        "txiq_",  # TX I/Q calibration
        "ram_cal_",  # RAM calibration
        "ram_rxiq_",  # RAM RX I/Q
        "readvdd33",  # Read VDD33
        "test_tout",  # Test timeout
        "tsen_meas",  # Temperature sensor measurement
        "bbpll_cal",  # Baseband PLL calibration
        "set_cal_",  # Set calibration
        "set_rfanagain_",  # Set RF analog gain
        "set_txdc_",  # Set TX DC
        "get_vdd33_",  # Get VDD33
        "gen_rx_gain_table",  # Generate RX gain table
        "ram_ana_inf_gating_en",  # RAM analog interface gating enable
        "tx_cont_en",  # TX continuous enable
        "tx_delay_cfg",  # TX delay configuration
        "tx_gain_table_set",  # TX gain table set
        "check_and_reset_hw_deadlock",  # Hardware deadlock check
        "s_config",  # System/hardware config
        "chan14_mic_cfg",  # Channel 14 MIC config
    ],
    "wifi_phy_pp": [
        "pp_",
        "ppT",
        "ppR",
        "ppP",
        "ppInstall",
        "ppCalTxAMPDULength",
        "ppCheckTx",  # Packet processor TX check
        "ppCal",  # Packet processor calibration
        "HdlAllBuffedEb",  # Handle buffered EB
    ],
    "wifi_phy_pp": ["pp_", "ppT", "ppR", "ppP", "ppInstall", "ppCalTxAMPDULength"],
    "wifi_lmac": ["lmac"],
    "wifi_device": ["wdev", "wDev_"],
    "wifi_device": [
        "wdev",
        "wDev_",
        "ic_set_sta",  # Set station mode
        "ic_set_vif",  # Set virtual interface
    ],
    "power_mgmt": [
        "pm_",
        "sleep",

@@ -406,15 +552,7 @@ SYMBOL_PATTERNS = {
        "deep_sleep",
        "power_down",
        "g_pm",
    ],
    "memory_mgmt": [
        "mem_",
        "memory_",
        "tlsf_",
        "memp_",
        "pbuf_",
        "pbuf_alloc",
        "pbuf_copy_partial_pbuf",
        "pmc",  # Power Management Controller
    ],
    "hal_layer": ["hal_"],
    "clock_mgmt": [

@@ -439,7 +577,6 @@ SYMBOL_PATTERNS = {
    "error_handling": ["panic", "abort", "assert", "error_", "fault"],
    "authentication": ["auth"],
    "ppp_protocol": ["ppp", "ipcp_", "lcp_", "chap_", "LcpEchoCheck"],
    "dhcp": ["dhcp", "handle_dhcp"],
    "ethernet_phy": [
        "emac_",
        "eth_phy_",

@@ -618,7 +755,15 @@ SYMBOL_PATTERNS = {
        "ampdu_dispatch_upto",
    ],
    "ieee802_11": ["ieee802_11_", "ieee802_11_parse_elems"],
    "rate_control": ["rssi_margin", "rcGetSched", "get_rate_fcc_index"],
    "rate_control": [
        "rssi_margin",
        "rcGetSched",
        "get_rate_fcc_index",
        "rcGetRate",  # Get rate
        "rc_get_",  # Rate control getters
        "rc_set_",  # Rate control setters
        "rc_enable_",  # Rate control enable functions
    ],
    "nan": ["nan_dp_", "nan_dp_post_tx", "nan_dp_delete_peer"],
    "channel_mgmt": ["chm_init", "chm_set_current_channel"],
    "trace": ["trc_init", "trc_onAmpduOp"],

@@ -799,31 +944,18 @@ SYMBOL_PATTERNS = {
        "supports_interlaced_inquiry_scan",
        "supports_reading_remote_extended_features",
    ],
    "bluetooth_ll": [
        "lld_pdu_",
        "ld_acl_",
        "lld_stop_ind_handler",
        "lld_evt_winsize_change",
        "config_lld_evt_funcs_reset",
        "config_lld_funcs_reset",
        "config_llm_funcs_reset",
        "llm_set_long_adv_data",
        "lld_retry_tx_prog",
        "llc_link_sup_to_ind_handler",
        "config_llc_funcs_reset",
        "lld_evt_rxwin_compute",
        "config_btdm_funcs_reset",
        "config_ea_funcs_reset",
        "llc_defalut_state_tab_reset",
        "config_rwip_funcs_reset",
        "ke_lmp_rx_flooding_detect",
    ],
}

# Demangled patterns: patterns found in demangled C++ names
DEMANGLED_PATTERNS = {
    "gpio_driver": ["GPIO"],
    "uart_driver": ["UART"],
    # mdns_lib must come before network_stack to avoid "udp" matching "_udpReadBuffer" in MDNSResponder
    "mdns_lib": [
        "MDNSResponder",
        "MDNSImplementation",
        "MDNS",
    ],
    "network_stack": [
        "lwip",
        "tcp",

@@ -836,6 +968,24 @@ DEMANGLED_PATTERNS = {
        "ethernet",
        "ppp",
        "slip",
        "UdpContext",  # UDP context class
        "DhcpServer",  # DHCP server class
    ],
    "arduino_core": [
        "String::",  # Arduino String class
        "Print::",  # Arduino Print class
        "HardwareSerial::",  # Serial class
        "IPAddress::",  # IP address class
        "EspClass::",  # ESP class
        "experimental::_SPI",  # Experimental SPI
    ],
    "ota": [
        "UpdaterClass",
        "Updater::",
    ],
    "wifi": [
        "ESP8266WiFi",
        "WiFi::",
    ],
    "wifi_stack": ["NetworkInterface"],
    "nimble_bt": [

@@ -854,7 +1004,6 @@ DEMANGLED_PATTERNS = {
    "rtti": ["__type_info", "__class_type_info"],
    "web_server_lib": ["AsyncWebServer", "AsyncWebHandler", "WebServer"],
    "async_tcp": ["AsyncClient", "AsyncServer"],
    "mdns_lib": ["mdns"],
    "json_lib": [
        "ArduinoJson",
        "JsonDocument",
@@ -1572,7 +1572,13 @@ bool APIConnection::send_noise_encryption_set_key_response(const NoiseEncryption
  resp.success = false;

  psk_t psk{};
  if (base64_decode(msg.key, psk.data(), msg.key.size()) != psk.size()) {
  if (msg.key.empty()) {
    if (this->parent_->clear_noise_psk(true)) {
      resp.success = true;
    } else {
      ESP_LOGW(TAG, "Failed to clear encryption key");
    }
  } else if (base64_decode(msg.key, psk.data(), msg.key.size()) != psk.size()) {
    ESP_LOGW(TAG, "Invalid encryption key length");
  } else if (!this->parent_->save_noise_psk(psk, true)) {
    ESP_LOGW(TAG, "Failed to save encryption key");
@@ -468,6 +468,31 @@ uint16_t APIServer::get_port() const { return this->port_; }
void APIServer::set_reboot_timeout(uint32_t reboot_timeout) { this->reboot_timeout_ = reboot_timeout; }

#ifdef USE_API_NOISE
bool APIServer::update_noise_psk_(const SavedNoisePsk &new_psk, const LogString *save_log_msg,
                                  const LogString *fail_log_msg, const psk_t &active_psk, bool make_active) {
  if (!this->noise_pref_.save(&new_psk)) {
    ESP_LOGW(TAG, "%s", LOG_STR_ARG(fail_log_msg));
    return false;
  }
  // ensure it's written immediately
  if (!global_preferences->sync()) {
    ESP_LOGW(TAG, "Failed to sync preferences");
    return false;
  }
  ESP_LOGD(TAG, "%s", LOG_STR_ARG(save_log_msg));
  if (make_active) {
    this->set_timeout(100, [this, active_psk]() {
      ESP_LOGW(TAG, "Disconnecting all clients to reset PSK");
      this->set_noise_psk(active_psk);
      for (auto &c : this->clients_) {
        DisconnectRequest req;
        c->send_message(req, DisconnectRequest::MESSAGE_TYPE);
      }
    });
  }
  return true;
}

bool APIServer::save_noise_psk(psk_t psk, bool make_active) {
#ifdef USE_API_NOISE_PSK_FROM_YAML
  // When PSK is set from YAML, this function should never be called

@@ -482,27 +507,21 @@ bool APIServer::save_noise_psk(psk_t psk, bool make_active) {
  }

  SavedNoisePsk new_saved_psk{psk};
  if (!this->noise_pref_.save(&new_saved_psk)) {
    ESP_LOGW(TAG, "Failed to save Noise PSK");
    return false;
  }
  // ensure it's written immediately
  if (!global_preferences->sync()) {
    ESP_LOGW(TAG, "Failed to sync preferences");
    return false;
  }
  ESP_LOGD(TAG, "Noise PSK saved");
  if (make_active) {
    this->set_timeout(100, [this, psk]() {
      ESP_LOGW(TAG, "Disconnecting all clients to reset PSK");
      this->set_noise_psk(psk);
      for (auto &c : this->clients_) {
        DisconnectRequest req;
        c->send_message(req, DisconnectRequest::MESSAGE_TYPE);
      }
    });
  }
  return true;
  return this->update_noise_psk_(new_saved_psk, LOG_STR("Noise PSK saved"), LOG_STR("Failed to save Noise PSK"), psk,
                                 make_active);
#endif
}
bool APIServer::clear_noise_psk(bool make_active) {
#ifdef USE_API_NOISE_PSK_FROM_YAML
  // When PSK is set from YAML, this function should never be called
  // but if it is, reject the change
  ESP_LOGW(TAG, "Key set in YAML");
  return false;
#else
  SavedNoisePsk empty_psk{};
  psk_t empty{};
  return this->update_noise_psk_(empty_psk, LOG_STR("Noise PSK cleared"), LOG_STR("Failed to clear Noise PSK"), empty,
                                 make_active);
#endif
}
#endif
@@ -53,6 +53,7 @@ class APIServer : public Component, public Controller {

#ifdef USE_API_NOISE
  bool save_noise_psk(psk_t psk, bool make_active = true);
  bool clear_noise_psk(bool make_active = true);
  void set_noise_psk(psk_t psk) { noise_ctx_->set_psk(psk); }
  std::shared_ptr<APINoiseContext> get_noise_ctx() { return noise_ctx_; }
#endif  // USE_API_NOISE

@@ -174,6 +175,10 @@ class APIServer : public Component, public Controller {

 protected:
  void schedule_reboot_timeout_();
#ifdef USE_API_NOISE
  bool update_noise_psk_(const SavedNoisePsk &new_psk, const LogString *save_log_msg, const LogString *fail_log_msg,
                         const psk_t &active_psk, bool make_active);
#endif  // USE_API_NOISE
  // Pointers and pointer-like types first (4 bytes each)
  std::unique_ptr<socket::Socket> socket_ = nullptr;
#ifdef USE_API_CLIENT_CONNECTED_TRIGGER
@@ -264,20 +264,31 @@ async def delayed_off_filter_to_code(config, filter_id):
        ),
    )
async def autorepeat_filter_to_code(config, filter_id):
    timings = []
    if len(config) > 0:
        timings.extend(
            (conf[CONF_DELAY], conf[CONF_TIME_OFF], conf[CONF_TIME_ON])
            for conf in config
        )
    else:
        timings.append(
            (
                cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds,
                cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds,
                cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds,
    if len(config) > 0:
        timings = [
            cg.StructInitializer(
                cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"),
                ("delay", conf[CONF_DELAY]),
                ("time_off", conf[CONF_TIME_OFF]),
                ("time_on", conf[CONF_TIME_ON]),
            )
            for conf in config
        ]
    else:
        timings = [
            cg.StructInitializer(
                cg.MockObj("AutorepeatFilterTiming", "esphome::binary_sensor::"),
                ("delay", cv.time_period_str_unit(DEFAULT_DELAY).total_milliseconds),
                (
                    "time_off",
                    cv.time_period_str_unit(DEFAULT_TIME_OFF).total_milliseconds,
                ),
                (
                    "time_on",
                    cv.time_period_str_unit(DEFAULT_TIME_ON).total_milliseconds,
                ),
            )
        ]
    var = cg.new_Pvariable(filter_id, timings)
    await cg.register_component(var, {})
    return var
@@ -2,11 +2,11 @@

#include <cinttypes>
#include <utility>
#include <vector>

#include "esphome/core/component.h"
#include "esphome/core/automation.h"
#include "esphome/core/hal.h"
#include "esphome/core/helpers.h"
#include "esphome/components/binary_sensor/binary_sensor.h"

namespace esphome {

@@ -92,8 +92,8 @@ class DoubleClickTrigger : public Trigger<> {

class MultiClickTrigger : public Trigger<>, public Component {
 public:
  explicit MultiClickTrigger(BinarySensor *parent, std::vector<MultiClickTriggerEvent> timing)
      : parent_(parent), timing_(std::move(timing)) {}
  explicit MultiClickTrigger(BinarySensor *parent, std::initializer_list<MultiClickTriggerEvent> timing)
      : parent_(parent), timing_(timing) {}

  void setup() override {
    this->last_state_ = this->parent_->get_state_default(false);

@@ -115,7 +115,7 @@ class MultiClickTrigger : public Trigger<>, public Component {
  void trigger_();

  BinarySensor *parent_;
  std::vector<MultiClickTriggerEvent> timing_;
  FixedVector<MultiClickTriggerEvent> timing_;
  uint32_t invalid_cooldown_{1000};
  optional<size_t> at_index_{};
  bool last_state_{false};
@@ -1,7 +1,6 @@
#include "filter.h"

#include "binary_sensor.h"
#include <utility>

namespace esphome {

@@ -68,7 +67,7 @@ float DelayedOffFilter::get_setup_priority() const { return setup_priority::HARD

optional<bool> InvertFilter::new_value(bool value) { return !value; }

AutorepeatFilter::AutorepeatFilter(std::vector<AutorepeatFilterTiming> timings) : timings_(std::move(timings)) {}
AutorepeatFilter::AutorepeatFilter(std::initializer_list<AutorepeatFilterTiming> timings) : timings_(timings) {}

optional<bool> AutorepeatFilter::new_value(bool value) {
  if (value) {
@@ -4,8 +4,6 @@
#include "esphome/core/component.h"
#include "esphome/core/helpers.h"

#include <vector>

namespace esphome {

namespace binary_sensor {

@@ -82,11 +80,6 @@ class InvertFilter : public Filter {
};

struct AutorepeatFilterTiming {
  AutorepeatFilterTiming(uint32_t delay, uint32_t off, uint32_t on) {
    this->delay = delay;
    this->time_off = off;
    this->time_on = on;
  }
  uint32_t delay;
  uint32_t time_off;
  uint32_t time_on;

@@ -94,7 +87,7 @@ struct AutorepeatFilterTiming {

class AutorepeatFilter : public Filter, public Component {
 public:
  explicit AutorepeatFilter(std::vector<AutorepeatFilterTiming> timings);
  explicit AutorepeatFilter(std::initializer_list<AutorepeatFilterTiming> timings);

  optional<bool> new_value(bool value) override;

@@ -104,7 +97,7 @@ class AutorepeatFilter : public Filter, public Component {
  void next_timing_();
  void next_value_(bool val);

  std::vector<AutorepeatFilterTiming> timings_;
  FixedVector<AutorepeatFilterTiming> timings_;
  uint8_t active_timing_{0};
};
@@ -385,12 +385,14 @@ void Climate::save_state_() {
  if (!traits.get_supported_custom_fan_modes().empty() && custom_fan_mode.has_value()) {
    state.uses_custom_fan_mode = true;
    const auto &supported = traits.get_supported_custom_fan_modes();
    std::vector<std::string> vec{supported.begin(), supported.end()};
    for (size_t i = 0; i < vec.size(); i++) {
      if (vec[i] == custom_fan_mode) {
    // std::set has consistent order (lexicographic for strings)
    size_t i = 0;
    for (const auto &mode : supported) {
      if (mode == custom_fan_mode) {
        state.custom_fan_mode = i;
        break;
      }
      i++;
    }
  }
  if (traits.get_supports_presets() && preset.has_value()) {

@@ -400,12 +402,14 @@ void Climate::save_state_() {
  if (!traits.get_supported_custom_presets().empty() && custom_preset.has_value()) {
    state.uses_custom_preset = true;
    const auto &supported = traits.get_supported_custom_presets();
    std::vector<std::string> vec{supported.begin(), supported.end()};
    for (size_t i = 0; i < vec.size(); i++) {
      if (vec[i] == custom_preset) {
    // std::set has consistent order (lexicographic for strings)
    size_t i = 0;
    for (const auto &preset : supported) {
      if (preset == custom_preset) {
        state.custom_preset = i;
        break;
      }
      i++;
    }
  }
  if (traits.get_supports_swing_modes()) {

@@ -549,22 +553,34 @@ void ClimateDeviceRestoreState::apply(Climate *climate) {
    climate->fan_mode = this->fan_mode;
  }
  if (!traits.get_supported_custom_fan_modes().empty() && this->uses_custom_fan_mode) {
    // std::set has consistent order (lexicographic for strings), so this is ok
    // std::set has consistent order (lexicographic for strings)
    const auto &modes = traits.get_supported_custom_fan_modes();
    std::vector<std::string> modes_vec{modes.begin(), modes.end()};
    if (custom_fan_mode < modes_vec.size()) {
      climate->custom_fan_mode = modes_vec[this->custom_fan_mode];
    if (custom_fan_mode < modes.size()) {
      size_t i = 0;
      for (const auto &mode : modes) {
        if (i == this->custom_fan_mode) {
          climate->custom_fan_mode = mode;
          break;
        }
        i++;
      }
    }
  }
  if (traits.get_supports_presets() && !this->uses_custom_preset) {
    climate->preset = this->preset;
  }
  if (!traits.get_supported_custom_presets().empty() && uses_custom_preset) {
    // std::set has consistent order (lexicographic for strings), so this is ok
    // std::set has consistent order (lexicographic for strings)
    const auto &presets = traits.get_supported_custom_presets();
    std::vector<std::string> presets_vec{presets.begin(), presets.end()};
    if (custom_preset < presets_vec.size()) {
      climate->custom_preset = presets_vec[this->custom_preset];
    if (custom_preset < presets.size()) {
      size_t i = 0;
      for (const auto &preset : presets) {
        if (i == this->custom_preset) {
          climate->custom_preset = preset;
          break;
        }
        i++;
      }
    }
  }
  if (traits.get_supports_swing_modes()) {
@@ -550,6 +550,32 @@ CONF_ENABLE_LWIP_BRIDGE_INTERFACE = "enable_lwip_bridge_interface"
CONF_ENABLE_LWIP_TCPIP_CORE_LOCKING = "enable_lwip_tcpip_core_locking"
CONF_ENABLE_LWIP_CHECK_THREAD_SAFETY = "enable_lwip_check_thread_safety"
CONF_DISABLE_LIBC_LOCKS_IN_IRAM = "disable_libc_locks_in_iram"
CONF_DISABLE_VFS_SUPPORT_TERMIOS = "disable_vfs_support_termios"
CONF_DISABLE_VFS_SUPPORT_SELECT = "disable_vfs_support_select"
CONF_DISABLE_VFS_SUPPORT_DIR = "disable_vfs_support_dir"

# VFS requirement tracking
# Components that need VFS features can call require_vfs_select() or require_vfs_dir()
KEY_VFS_SELECT_REQUIRED = "vfs_select_required"
KEY_VFS_DIR_REQUIRED = "vfs_dir_required"


def require_vfs_select() -> None:
    """Mark that VFS select support is required by a component.

    Call this from components that use esp_vfs_eventfd or other VFS select features.
    This prevents CONFIG_VFS_SUPPORT_SELECT from being disabled.
    """
    CORE.data[KEY_VFS_SELECT_REQUIRED] = True


def require_vfs_dir() -> None:
    """Mark that VFS directory support is required by a component.

    Call this from components that use directory functions (opendir, readdir, mkdir, etc.).
    This prevents CONFIG_VFS_SUPPORT_DIR from being disabled.
    """
    CORE.data[KEY_VFS_DIR_REQUIRED] = True

def _validate_idf_component(config: ConfigType) -> ConfigType:

@@ -615,6 +641,13 @@ FRAMEWORK_SCHEMA = cv.All(
                    cv.Optional(
                        CONF_DISABLE_LIBC_LOCKS_IN_IRAM, default=True
                    ): cv.boolean,
                    cv.Optional(
                        CONF_DISABLE_VFS_SUPPORT_TERMIOS, default=True
                    ): cv.boolean,
                    cv.Optional(
                        CONF_DISABLE_VFS_SUPPORT_SELECT, default=True
                    ): cv.boolean,
                    cv.Optional(CONF_DISABLE_VFS_SUPPORT_DIR, default=True): cv.boolean,
                    cv.Optional(CONF_EXECUTE_FROM_PSRAM): cv.boolean,
                }
            ),

@@ -962,6 +995,43 @@ async def to_code(config):
    if advanced.get(CONF_DISABLE_LIBC_LOCKS_IN_IRAM, True):
        add_idf_sdkconfig_option("CONFIG_LIBC_LOCKS_PLACE_IN_IRAM", False)

    # Disable VFS support for termios (terminal I/O functions)
    # ESPHome doesn't use termios functions on ESP32 (only used in host UART driver).
    # Saves approximately 1.8KB of flash when disabled (default).
    add_idf_sdkconfig_option(
        "CONFIG_VFS_SUPPORT_TERMIOS",
        not advanced.get(CONF_DISABLE_VFS_SUPPORT_TERMIOS, True),
    )

    # Disable VFS support for select() with file descriptors
    # ESPHome only uses select() with sockets via lwip_select(), which still works.
    # VFS select is only needed for UART/eventfd file descriptors.
    # Components that need it (e.g., openthread) call require_vfs_select().
    # Saves approximately 2.7KB of flash when disabled (default).
    if CORE.data.get(KEY_VFS_SELECT_REQUIRED, False):
        # Component requires VFS select - force enable regardless of user setting
        add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_SELECT", True)
    else:
        # No component needs it - allow user to control (default: disabled)
        add_idf_sdkconfig_option(
            "CONFIG_VFS_SUPPORT_SELECT",
            not advanced.get(CONF_DISABLE_VFS_SUPPORT_SELECT, True),
        )

    # Disable VFS support for directory functions (opendir, readdir, mkdir, etc.)
    # ESPHome doesn't use directory functions on ESP32.
    # Components that need it (e.g., storage components) call require_vfs_dir().
    # Saves approximately 0.5KB+ of flash when disabled (default).
    if CORE.data.get(KEY_VFS_DIR_REQUIRED, False):
        # Component requires VFS directory support - force enable regardless of user setting
        add_idf_sdkconfig_option("CONFIG_VFS_SUPPORT_DIR", True)
    else:
        # No component needs it - allow user to control (default: disabled)
        add_idf_sdkconfig_option(
            "CONFIG_VFS_SUPPORT_DIR",
            not advanced.get(CONF_DISABLE_VFS_SUPPORT_DIR, True),
        )

    cg.add_platformio_option("board_build.partitions", "partitions.csv")
    if CONF_PARTITIONS in config:
        add_extra_build_file(
@@ -112,7 +112,7 @@ async def to_code(config):

    cg.add_define("USE_IMPROV")

    await improv_base.setup_improv_core(var, config)
    await improv_base.setup_improv_core(var, config, "esp32_improv")

    cg.add(var.set_identify_duration(config[CONF_IDENTIFY_DURATION]))
    cg.add(var.set_authorized_duration(config[CONF_AUTHORIZED_DURATION]))
@@ -389,11 +389,13 @@ void ESP32ImprovComponent::check_wifi_connection_() {
  std::string url_strings[3];
  size_t url_count = 0;

#ifdef USE_ESP32_IMPROV_NEXT_URL
  // Add next_url if configured (should be first per Improv BLE spec)
  std::string next_url = this->get_formatted_next_url_();
  if (!next_url.empty()) {
    url_strings[url_count++] = std::move(next_url);
  }
#endif

  // Add default URLs for backward compatibility
  url_strings[url_count++] = ESPHOME_MY_LINK;
@@ -190,7 +190,9 @@ async def to_code(config):
    cg.add_define("ESPHOME_VARIANT", "ESP8266")
    cg.add_define(ThreadModel.SINGLE)

    cg.add_platformio_option("extra_scripts", ["pre:iram_fix.py", "post:post_build.py"])
    cg.add_platformio_option(
        "extra_scripts", ["pre:testing_mode.py", "post:post_build.py"]
    )

    conf = config[CONF_FRAMEWORK]
    cg.add_platformio_option("framework", "arduino")

@@ -230,9 +232,9 @@ async def to_code(config):
    # For cases where nullptrs can be handled, use nothrow: `new (std::nothrow) T;`
    cg.add_build_flag("-DNEW_OOM_ABORT")

    # In testing mode, fake a larger IRAM to allow linking grouped component tests
    # Real ESP8266 hardware only has 32KB IRAM, but for CI testing we pretend it has 2MB
    # This is done via a pre-build script that generates a custom linker script
    # In testing mode, fake larger memory to allow linking grouped component tests
    # Real ESP8266 hardware only has 32KB IRAM and ~80KB RAM, but for CI testing
    # we pretend it has much larger memory to test that components compile together
    if CORE.testing_mode:
        cg.add_build_flag("-DESPHOME_TESTING_MODE")

@@ -271,8 +273,8 @@ def copy_files():
        post_build_file,
        CORE.relative_build_path("post_build.py"),
    )
    iram_fix_file = dir / "iram_fix.py.script"
    testing_mode_file = dir / "testing_mode.py.script"
    copy_file_if_changed(
        iram_fix_file,
        CORE.relative_build_path("iram_fix.py"),
        testing_mode_file,
        CORE.relative_build_path("testing_mode.py"),
    )
@@ -1,44 +0,0 @@
import os
import re

# pylint: disable=E0602
Import("env")  # noqa


def patch_linker_script_after_preprocess(source, target, env):
    """Patch the local linker script after PlatformIO preprocesses it."""
    # Check if we're in testing mode by looking for the define
    build_flags = env.get("BUILD_FLAGS", [])
    testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)

    if not testing_mode:
        return

    # Get the local linker script path
    build_dir = env.subst("$BUILD_DIR")
    local_ld = os.path.join(build_dir, "ld", "local.eagle.app.v6.common.ld")

    if not os.path.exists(local_ld):
        return

    # Read the linker script
    with open(local_ld, "r") as f:
        content = f.read()

    # Replace IRAM size from 0x8000 (32KB) to 0x200000 (2MB)
    # The line looks like: iram1_0_seg : org = 0x40100000, len = 0x8000
    updated = re.sub(
        r"(iram1_0_seg\s*:\s*org\s*=\s*0x40100000\s*,\s*len\s*=\s*)0x8000",
        r"\g<1>0x200000",
        content,
    )

    if updated != content:
        with open(local_ld, "w") as f:
            f.write(updated)
        print("ESPHome: Patched IRAM size to 2MB for testing mode")


# Hook into the build process right before linking
# This runs after PlatformIO has already preprocessed the linker scripts
env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_linker_script_after_preprocess)
esphome/components/esp8266/testing_mode.py.script (new file) — 166 lines
@@ -0,0 +1,166 @@
import os
import re

# pylint: disable=E0602
Import("env")  # noqa


# Memory sizes for testing mode (allow larger builds for CI component grouping)
TESTING_IRAM_SIZE = "0x200000"  # 2MB
TESTING_DRAM_SIZE = "0x200000"  # 2MB
TESTING_FLASH_SIZE = "0x2000000"  # 32MB


def patch_segment_size(content, segment_name, new_size, label):
    """Patch a memory segment's length in linker script.

    Args:
        content: Linker script content
        segment_name: Name of the segment (e.g., 'iram1_0_seg')
        new_size: New size as hex string (e.g., '0x200000')
        label: Human-readable label for logging (e.g., 'IRAM')

    Returns:
        Tuple of (patched_content, was_patched)
    """
    # Match: segment_name : org = 0x..., len = 0x...
    pattern = rf"({segment_name}\s*:\s*org\s*=\s*0x[0-9a-fA-F]+\s*,\s*len\s*=\s*)0x[0-9a-fA-F]+"
    new_content = re.sub(pattern, rf"\g<1>{new_size}", content)
    return new_content, new_content != content


def apply_memory_patches(content):
    """Apply IRAM, DRAM, and Flash patches to linker script content.

    Args:
        content: Linker script content as string

    Returns:
        Patched content as string
    """
    patches_applied = []

    # Patch IRAM (for larger code in IRAM)
    content, patched = patch_segment_size(content, "iram1_0_seg", TESTING_IRAM_SIZE, "IRAM")
    if patched:
        patches_applied.append("IRAM")

    # Patch DRAM (for larger BSS/data sections)
    content, patched = patch_segment_size(content, "dram0_0_seg", TESTING_DRAM_SIZE, "DRAM")
    if patched:
        patches_applied.append("DRAM")

    # Patch Flash (for larger code sections)
    content, patched = patch_segment_size(content, "irom0_0_seg", TESTING_FLASH_SIZE, "Flash")
    if patched:
        patches_applied.append("Flash")

    if patches_applied:
        iram_mb = int(TESTING_IRAM_SIZE, 16) // (1024 * 1024)
        dram_mb = int(TESTING_DRAM_SIZE, 16) // (1024 * 1024)
        flash_mb = int(TESTING_FLASH_SIZE, 16) // (1024 * 1024)
        print(f"  Patched memory segments: {', '.join(patches_applied)} (IRAM/DRAM: {iram_mb}MB, Flash: {flash_mb}MB)")

    return content


def patch_linker_script_file(filepath, description):
    """Patch a linker script file in the build directory with enlarged memory segments.

    This function modifies linker scripts in the build directory only (never SDK files).
    It patches IRAM, DRAM, and Flash segments to allow larger builds in testing mode.

    Args:
        filepath: Path to the linker script file in the build directory
        description: Human-readable description for logging

    Returns:
        True if the file was patched, False if already patched or not found
    """
    if not os.path.exists(filepath):
        print(f"ESPHome: {description} not found at {filepath}")
        return False

    print(f"ESPHome: Patching {description}...")
    with open(filepath, "r") as f:
        content = f.read()

    patched_content = apply_memory_patches(content)

    if patched_content != content:
        with open(filepath, "w") as f:
            f.write(patched_content)
        print(f"ESPHome: Successfully patched {description}")
        return True
    else:
        print(f"ESPHome: {description} already patched or no changes needed")
        return False


def patch_local_linker_script(source, target, env):
    """Patch the local.eagle.app.v6.common.ld in build directory.

    This patches the preprocessed linker script that PlatformIO creates in the build
    directory, enlarging IRAM, DRAM, and Flash segments for testing mode.

    Args:
        source: SCons source nodes
        target: SCons target nodes
        env: SCons environment
    """
    # Check if we're in testing mode
    build_flags = env.get("BUILD_FLAGS", [])
    testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)

    if not testing_mode:
        return

    # Patch the local linker script if it exists
    build_dir = env.subst("$BUILD_DIR")
    ld_dir = os.path.join(build_dir, "ld")
    if os.path.exists(ld_dir):
        local_ld = os.path.join(ld_dir, "local.eagle.app.v6.common.ld")
        if os.path.exists(local_ld):
            patch_linker_script_file(local_ld, "local.eagle.app.v6.common.ld")


# Check if we're in testing mode
build_flags = env.get("BUILD_FLAGS", [])
testing_mode = any("-DESPHOME_TESTING_MODE" in flag for flag in build_flags)

if testing_mode:
    # Create a custom linker script in the build directory with patched memory limits
    # This allows larger IRAM/DRAM/Flash for CI component grouping tests
    build_dir = env.subst("$BUILD_DIR")
    ldscript = env.GetProjectOption("board_build.ldscript", "")
    assert ldscript, "No linker script configured in board_build.ldscript"

    framework_dir = env.PioPlatform().get_package_dir("framework-arduinoespressif8266")
    assert framework_dir is not None, "Could not find framework-arduinoespressif8266 package"

    # Read the original SDK linker script (read-only, SDK is never modified)
    sdk_ld = os.path.join(framework_dir, "tools", "sdk", "ld", ldscript)
    # Create a custom version in the build directory (isolated, temporary)
    custom_ld = os.path.join(build_dir, f"testing_{ldscript}")

    if os.path.exists(sdk_ld) and not os.path.exists(custom_ld):
        # Read the SDK linker script
        with open(sdk_ld, "r") as f:
            content = f.read()

        # Apply memory patches (IRAM: 2MB, DRAM: 2MB, Flash: 32MB)
        patched_content = apply_memory_patches(content)

        # Write the patched linker script to the build directory
        with open(custom_ld, "w") as f:
            f.write(patched_content)

        print(f"ESPHome: Created custom linker script: {custom_ld}")

    # Tell the linker to use our custom script from the build directory
    assert os.path.exists(custom_ld), f"Custom linker script not found: {custom_ld}"
    env.Replace(LDSCRIPT_PATH=custom_ld)
    print(f"ESPHome: Using custom linker script with patched memory limits")

    # Also patch local.eagle.app.v6.common.ld after PlatformIO creates it
    env.AddPreAction("$BUILD_DIR/${PROGNAME}.elf", patch_local_linker_script)
@@ -14,13 +14,13 @@ template<typename... Ts> class SendAction : public Action<Ts...>, public Parente
  TEMPLATABLE_VALUE(std::vector<uint8_t>, data);

 public:
  void add_on_sent(const std::vector<Action<Ts...> *> &actions) {
  void add_on_sent(const std::initializer_list<Action<Ts...> *> &actions) {
    this->sent_.add_actions(actions);
    if (this->flags_.wait_for_sent) {
      this->sent_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
    }
  }
  void add_on_error(const std::vector<Action<Ts...> *> &actions) {
  void add_on_error(const std::initializer_list<Action<Ts...> *> &actions) {
    this->error_.add_actions(actions);
    if (this->flags_.wait_for_sent) {
      this->error_.add_action(new LambdaAction<Ts...>([this](Ts... x) {
@@ -8,12 +8,19 @@ namespace event {
static const char *const TAG = "event";

void Event::trigger(const std::string &event_type) {
  auto found = types_.find(event_type);
  if (found == types_.end()) {
  // Linear search - faster than std::set for small datasets (1-5 items typical)
  const std::string *found = nullptr;
  for (const auto &type : this->types_) {
    if (type == event_type) {
      found = &type;
      break;
    }
  }
  if (found == nullptr) {
    ESP_LOGE(TAG, "'%s': invalid event type for trigger(): %s", this->get_name().c_str(), event_type.c_str());
    return;
  }
  last_event_type = &(*found);
  last_event_type = found;
  ESP_LOGD(TAG, "'%s' Triggered event '%s'", this->get_name().c_str(), last_event_type->c_str());
  this->event_callback_.call(event_type);
}
@@ -1,6 +1,5 @@
#pragma once

#include <set>
#include <string>

#include "esphome/core/component.h"

@@ -26,13 +25,13 @@ class Event : public EntityBase, public EntityBase_DeviceClass {
  const std::string *last_event_type;

  void trigger(const std::string &event_type);
  void set_event_types(const std::set<std::string> &event_types) { this->types_ = event_types; }
  std::set<std::string> get_event_types() const { return this->types_; }
  void set_event_types(const std::initializer_list<std::string> &event_types) { this->types_ = event_types; }
  const FixedVector<std::string> &get_event_types() const { return this->types_; }
  void add_on_event_callback(std::function<void(const std::string &event_type)> &&callback);

 protected:
  CallbackManager<void(const std::string &event_type)> event_callback_;
  std::set<std::string> types_;
  FixedVector<std::string> types_;
};

}  // namespace event
@@ -67,7 +67,7 @@ void GPIOSwitch::write_state(bool state) {
  this->pin_->digital_write(state);
  this->publish_state(state);
}
void GPIOSwitch::set_interlock(const std::vector<Switch *> &interlock) { this->interlock_ = interlock; }
void GPIOSwitch::set_interlock(const std::initializer_list<Switch *> &interlock) { this->interlock_ = interlock; }

}  // namespace gpio
}  // namespace esphome
@@ -2,10 +2,9 @@

#include "esphome/core/component.h"
#include "esphome/core/hal.h"
#include "esphome/core/helpers.h"
#include "esphome/components/switch/switch.h"

#include <vector>

namespace esphome {
namespace gpio {

@@ -19,14 +18,14 @@ class GPIOSwitch : public switch_::Switch, public Component {

  void setup() override;
  void dump_config() override;
  void set_interlock(const std::vector<Switch *> &interlock);
  void set_interlock(const std::initializer_list<Switch *> &interlock);
  void set_interlock_wait_time(uint32_t interlock_wait_time) { interlock_wait_time_ = interlock_wait_time; }

 protected:
  void write_state(bool state) override;

  GPIOPin *pin_;
  std::vector<Switch *> interlock_;
  FixedVector<Switch *> interlock_;
  uint32_t interlock_wait_time_{0};
};
@@ -3,6 +3,8 @@ import re
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.const import __version__
from esphome.cpp_generator import MockObj
from esphome.types import ConfigType

CODEOWNERS = ["@esphome/core"]

@@ -35,7 +37,9 @@ def _process_next_url(url: str):
    return url


async def setup_improv_core(var, config):
    if CONF_NEXT_URL in config:
        cg.add(var.set_next_url(_process_next_url(config[CONF_NEXT_URL])))
async def setup_improv_core(var: MockObj, config: ConfigType, component: str):
    if next_url := config.get(CONF_NEXT_URL):
        cg.add(var.set_next_url(_process_next_url(next_url)))
        cg.add_define(f"USE_{component.upper()}_NEXT_URL")

    cg.add_library("improv/Improv", "1.2.4")
@@ -2,10 +2,12 @@

#include "esphome/components/network/util.h"
#include "esphome/core/application.h"
#include "esphome/core/defines.h"

namespace esphome {
namespace improv_base {

#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
static constexpr const char DEVICE_NAME_PLACEHOLDER[] = "{{device_name}}";
static constexpr size_t DEVICE_NAME_PLACEHOLDER_LEN = sizeof(DEVICE_NAME_PLACEHOLDER) - 1;
static constexpr const char IP_ADDRESS_PLACEHOLDER[] = "{{ip_address}}";

@@ -43,6 +45,7 @@ std::string ImprovBase::get_formatted_next_url_() {

  return formatted_url;
}
#endif

}  // namespace improv_base
}  // namespace esphome
@@ -1,17 +1,22 @@
#pragma once

#include <string>
#include "esphome/core/defines.h"

namespace esphome {
namespace improv_base {

class ImprovBase {
 public:
#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
  void set_next_url(const std::string &next_url) { this->next_url_ = next_url; }
#endif

 protected:
#if defined(USE_ESP32_IMPROV_NEXT_URL) || defined(USE_IMPROV_SERIAL_NEXT_URL)
  std::string get_formatted_next_url_();
  std::string next_url_;
#endif
};

}  // namespace improv_base
@@ -43,4 +43,4 @@ FINAL_VALIDATE_SCHEMA = validate_logger
async def to_code(config):
    var = cg.new_Pvariable(config[CONF_ID])
    await cg.register_component(var, config)
    await improv_base.setup_improv_core(var, config)
    await improv_base.setup_improv_core(var, config, "improv_serial")
@@ -146,9 +146,11 @@ void ImprovSerialComponent::loop() {

std::vector<uint8_t> ImprovSerialComponent::build_rpc_settings_response_(improv::Command command) {
  std::vector<std::string> urls;
#ifdef USE_IMPROV_SERIAL_NEXT_URL
  if (!this->next_url_.empty()) {
    urls.push_back(this->get_formatted_next_url_());
  }
#endif
#ifdef USE_WEBSERVER
  for (auto &ip : wifi::global_wifi_component->wifi_sta_ip_addresses()) {
    if (ip.is_ip4()) {
@@ -61,8 +61,12 @@ void AddressableLightTransformer::start() {
|
||||
this->target_color_ *= to_uint8_scale(end_values.get_brightness() * end_values.get_state());
|
||||
}
|
||||
|
||||
inline constexpr uint8_t subtract_scaled_difference(uint8_t a, uint8_t b, int32_t scale) {
|
||||
return uint8_t(int32_t(a) - (((int32_t(a) - int32_t(b)) * scale) / 256));
|
||||
}
|
||||
|
||||
optional<LightColorValues> AddressableLightTransformer::apply() {
|
||||
float smoothed_progress = LightTransitionTransformer::smoothed_progress(this->get_progress_());
|
||||
float smoothed_progress = LightTransformer::smoothed_progress(this->get_progress_());
|
||||
|
||||
// When running an output-buffer modifying effect, don't try to transition individual LEDs, but instead just fade the
|
||||
// LightColorValues. write_state() then picks up the change in brightness, and the color change is picked up by the
|
||||
@@ -74,38 +78,37 @@ optional<LightColorValues> AddressableLightTransformer::apply() {
|
||||
// all LEDs, we use the current state of each LED as the start.
|
||||
|
||||
// We can't use a direct lerp smoothing here though - that would require creating a copy of the original
|
||||
// state of each LED at the start of the transition.
|
||||
// Instead, we "fake" the look of the LERP by using an exponential average over time and using
|
||||
// dynamically-calculated alpha values to match the look.
|
||||
// state of each LED at the start of the transition. Instead, we "fake" the look of lerp by calculating
|
||||
// the delta between the current state and the target state, assuming that the delta represents the rest
|
||||
// of the transition that was to be applied as of the previous transition step, and scaling the delta for
|
||||
// what should be left after the current transition step. In this manner, the delta decays to zero as the
|
||||
// transition progresses.
|
||||
//
|
||||
// Here's an example of how the algorithm progresses in discrete steps:
|
||||
//
|
||||
// At time = 0.00, 0% complete, 100% remaining, 100% will remain after this step, so the scale is 100% / 100% = 100%.
|
||||
// At time = 0.10, 0% complete, 100% remaining, 90% will remain after this step, so the scale is 90% / 100% = 90%.
|
||||
// At time = 0.20, 10% complete, 90% remaining, 80% will remain after this step, so the scale is 80% / 90% = 88.9%.
|
||||
// At time = 0.50, 20% complete, 80% remaining, 50% will remain after this step, so the scale is 50% / 80% = 62.5%.
|
||||
// At time = 0.90, 50% complete, 50% remaining, 10% will remain after this step, so the scale is 10% / 50% = 20%.
|
||||
// At time = 0.91, 90% complete, 10% remaining, 9% will remain after this step, so the scale is 9% / 10% = 90%.
|
||||
// At time = 1.00, 91% complete, 9% remaining, 0% will remain after this step, so the scale is 0% / 9% = 0%.
|
||||
//
|
||||
// Because the color values are quantized to 8 bit resolution after each step, the transition may appear
|
||||
// non-linear when applying small deltas.
|
||||
|
||||
float denom = (1.0f - smoothed_progress);
|
||||
float alpha = denom == 0.0f ? 1.0f : (smoothed_progress - this->last_transition_progress_) / denom;
|
||||
|
||||
// We need to use a low-resolution alpha here which makes the transition set in only after ~half of the length
|
||||
// We solve this by accumulating the fractional part of the alpha over time.
|
||||
float alpha255 = alpha * 255.0f;
|
||||
float alpha255int = floorf(alpha255);
|
||||
float alpha255remainder = alpha255 - alpha255int;
|
||||
|
||||
this->accumulated_alpha_ += alpha255remainder;
|
||||
float alpha_add = floorf(this->accumulated_alpha_);
|
||||
this->accumulated_alpha_ -= alpha_add;
|
||||
|
||||
alpha255 += alpha_add;
|
||||
alpha255 = clamp(alpha255, 0.0f, 255.0f);
|
||||
auto alpha8 = static_cast<uint8_t>(alpha255);
|
||||
|
||||
if (alpha8 != 0) {
|
||||
uint8_t inv_alpha8 = 255 - alpha8;
|
||||
Color add = this->target_color_ * alpha8;
|
||||
|
||||
for (auto led : this->light_)
|
||||
led.set(add + led.get() * inv_alpha8);
|
||||
if (smoothed_progress > this->last_transition_progress_ && this->last_transition_progress_ < 1.f) {
|
||||
int32_t scale = int32_t(256.f * std::max((1.f - smoothed_progress) / (1.f - this->last_transition_progress_), 0.f));
|
||||
for (auto led : this->light_) {
|
||||
led.set_rgbw(subtract_scaled_difference(this->target_color_.red, led.get_red(), scale),
|
||||
subtract_scaled_difference(this->target_color_.green, led.get_green(), scale),
|
||||
subtract_scaled_difference(this->target_color_.blue, led.get_blue(), scale),
|
||||
subtract_scaled_difference(this->target_color_.white, led.get_white(), scale));
|
||||
}
|
||||
this->last_transition_progress_ = smoothed_progress;
|
||||
this->light_.schedule_show();
|
||||
}
|
||||
|
||||
this->last_transition_progress_ = smoothed_progress;
|
||||
this->light_.schedule_show();
|
||||
|
||||
return {};
|
||||
}
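
For readers following the delta-scaling rewrite above, here is a minimal self-contained sketch (editorial illustration only, not part of this diff; the channel and target values are made up) that steps one hypothetical LED channel through the discrete timeline from the comment and prints how the remaining delta decays toward the target:

#include <algorithm>
#include <cstdint>
#include <cstdio>

// Same helper as in the diff: keep `scale`/256 of the remaining difference between `b` and the target `a`.
inline constexpr uint8_t subtract_scaled_difference(uint8_t a, uint8_t b, int32_t scale) {
  return uint8_t(int32_t(a) - (((int32_t(a) - int32_t(b)) * scale) / 256));
}

int main() {
  uint8_t value = 0;           // hypothetical starting channel value
  const uint8_t target = 200;  // hypothetical target channel value
  float last_progress = 0.0f;
  // Discrete steps mirroring the timeline in the comment above (progress assumed equal to elapsed time here).
  const float steps[] = {0.10f, 0.20f, 0.50f, 0.90f, 0.91f, 1.00f};
  for (float progress : steps) {
    int32_t scale = int32_t(256.f * std::max((1.f - progress) / (1.f - last_progress), 0.f));
    value = subtract_scaled_difference(target, value, scale);
    std::printf("progress=%.2f scale=%d value=%d\n", progress, scale, (int) value);
    last_progress = progress;
  }
  return 0;  // value ends at 200: the delta has fully decayed by the final step
}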
@@ -8,7 +8,7 @@
#include "esphome/core/defines.h"
#include "light_output.h"
#include "light_state.h"
#include "transformers.h"
#include "light_transformer.h"

#ifdef USE_POWER_SUPPLY
#include "esphome/components/power_supply/power_supply.h"
@@ -103,7 +103,7 @@ class AddressableLight : public LightOutput, public Component {
  bool effect_active_{false};
};

class AddressableLightTransformer : public LightTransitionTransformer {
class AddressableLightTransformer : public LightTransformer {
 public:
  AddressableLightTransformer(AddressableLight &light) : light_(light) {}

@@ -113,7 +113,6 @@ class AddressableLightTransformer : public LightTransitionTransformer {
 protected:
  AddressableLight &light_;
  float last_transition_progress_{0.0f};
  float accumulated_alpha_{0.0f};
  Color target_color_{};
};

@@ -1,9 +1,9 @@
#pragma once

#include <utility>
#include <vector>

#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include "esphome/components/light/light_state.h"
#include "esphome/components/light/addressable_light.h"

@@ -113,7 +113,7 @@ struct AddressableColorWipeEffectColor {
class AddressableColorWipeEffect : public AddressableLightEffect {
 public:
  explicit AddressableColorWipeEffect(const std::string &name) : AddressableLightEffect(name) {}
  void set_colors(const std::vector<AddressableColorWipeEffectColor> &colors) { this->colors_ = colors; }
  void set_colors(const std::initializer_list<AddressableColorWipeEffectColor> &colors) { this->colors_ = colors; }
  void set_add_led_interval(uint32_t add_led_interval) { this->add_led_interval_ = add_led_interval; }
  void set_reverse(bool reverse) { this->reverse_ = reverse; }
  void apply(AddressableLight &it, const Color &current_color) override {
@@ -155,7 +155,7 @@ class AddressableColorWipeEffect : public AddressableLightEffect {
  }

 protected:
  std::vector<AddressableColorWipeEffectColor> colors_;
  FixedVector<AddressableColorWipeEffectColor> colors_;
  size_t at_color_{0};
  uint32_t last_add_{0};
  uint32_t add_led_interval_{};

@@ -1,9 +1,9 @@
#pragma once

#include <utility>
#include <vector>

#include "esphome/core/automation.h"
#include "esphome/core/helpers.h"
#include "light_effect.h"

namespace esphome {
@@ -188,10 +188,10 @@ class StrobeLightEffect : public LightEffect {
    this->last_switch_ = now;
  }

  void set_colors(const std::vector<StrobeLightEffectColor> &colors) { this->colors_ = colors; }
  void set_colors(const std::initializer_list<StrobeLightEffectColor> &colors) { this->colors_ = colors; }

 protected:
  std::vector<StrobeLightEffectColor> colors_;
  FixedVector<StrobeLightEffectColor> colors_;
  uint32_t last_switch_{0};
  size_t at_color_{0};
};

@@ -17,19 +17,19 @@ class ESPColorCorrection {
                 this->color_correct_blue(color.blue), this->color_correct_white(color.white));
  }
  inline uint8_t color_correct_red(uint8_t red) const ESPHOME_ALWAYS_INLINE {
    uint8_t res = esp_scale8(esp_scale8(red, this->max_brightness_.red), this->local_brightness_);
    uint8_t res = esp_scale8_twice(red, this->max_brightness_.red, this->local_brightness_);
    return this->gamma_table_[res];
  }
  inline uint8_t color_correct_green(uint8_t green) const ESPHOME_ALWAYS_INLINE {
    uint8_t res = esp_scale8(esp_scale8(green, this->max_brightness_.green), this->local_brightness_);
    uint8_t res = esp_scale8_twice(green, this->max_brightness_.green, this->local_brightness_);
    return this->gamma_table_[res];
  }
  inline uint8_t color_correct_blue(uint8_t blue) const ESPHOME_ALWAYS_INLINE {
    uint8_t res = esp_scale8(esp_scale8(blue, this->max_brightness_.blue), this->local_brightness_);
    uint8_t res = esp_scale8_twice(blue, this->max_brightness_.blue, this->local_brightness_);
    return this->gamma_table_[res];
  }
  inline uint8_t color_correct_white(uint8_t white) const ESPHOME_ALWAYS_INLINE {
    uint8_t res = esp_scale8(esp_scale8(white, this->max_brightness_.white), this->local_brightness_);
    uint8_t res = esp_scale8_twice(white, this->max_brightness_.white, this->local_brightness_);
    return this->gamma_table_[res];
  }
  inline Color color_uncorrect(Color color) const ESPHOME_ALWAYS_INLINE {

@@ -38,6 +38,10 @@ class LightTransformer {
  const LightColorValues &get_target_values() const { return this->target_values_; }

 protected:
  // This looks crazy, but it reduces to 6x^5 - 15x^4 + 10x^3 which is just a smooth sigmoid-like
  // transition from 0 to 1 on x = [0, 1]
  static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); }

  /// The progress of this transition, on a scale of 0 to 1.
  float get_progress_() {
    uint32_t now = esphome::millis();

@@ -50,15 +50,11 @@ class LightTransitionTransformer : public LightTransformer {
    if (this->changing_color_mode_)
      p = p < 0.5f ? p * 2 : (p - 0.5) * 2;

    float v = LightTransitionTransformer::smoothed_progress(p);
    float v = LightTransformer::smoothed_progress(p);
    return LightColorValues::lerp(start, end, v);
  }

 protected:
  // This looks crazy, but it reduces to 6x^5 - 15x^4 + 10x^3 which is just a smooth sigmoid-like
  // transition from 0 to 1 on x = [0, 1]
  static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); }

  LightColorValues end_values_{};
  LightColorValues intermediate_values_{};
  bool changing_color_mode_{false};
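
As an editorial aside (not part of the diff), the smoothed_progress() one-liner moved above really is the quintic smoothstep 6x^5 - 15x^4 + 10x^3; a tiny standalone check, assuming nothing beyond the formula itself:

#include <cassert>
#include <cmath>

// x*x*x*(x*(x*6 - 15) + 10) expands to x^3*(6x^2 - 15x + 10) = 6x^5 - 15x^4 + 10x^3.
// f(0) = 0, f(1) = 6 - 15 + 10 = 1, and f'(x) = 30x^2(1 - x)^2 >= 0, so the curve
// rises smoothly from 0 to 1 with zero slope at both ends.
static float smoothed_progress(float x) { return x * x * x * (x * (x * 6.0f - 15.0f) + 10.0f); }

int main() {
  for (float x = 0.0f; x <= 1.0f; x += 0.125f) {
    float expanded = 6.0f * std::pow(x, 5.0f) - 15.0f * std::pow(x, 4.0f) + 10.0f * std::pow(x, 3.0f);
    assert(std::fabs(smoothed_progress(x) - expanded) < 1e-5f);
  }
  return 0;
}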
@@ -4,6 +4,7 @@ from esphome.components.esp32 import (
|
||||
VARIANT_ESP32H2,
|
||||
add_idf_sdkconfig_option,
|
||||
only_on_variant,
|
||||
require_vfs_select,
|
||||
)
|
||||
from esphome.components.mdns import MDNSComponent, enable_mdns_storage
|
||||
import esphome.config_validation as cv
|
||||
@@ -106,6 +107,14 @@ _CONNECTION_SCHEMA = cv.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _require_vfs_select(config):
|
||||
"""Register VFS select requirement during config validation."""
|
||||
# OpenThread uses esp_vfs_eventfd which requires VFS select support
|
||||
require_vfs_select()
|
||||
return config
|
||||
|
||||
|
||||
CONFIG_SCHEMA = cv.All(
|
||||
cv.Schema(
|
||||
{
|
||||
@@ -122,6 +131,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
cv.has_exactly_one_key(CONF_NETWORK_KEY, CONF_TLV),
|
||||
cv.only_with_esp_idf,
|
||||
only_on_variant(supported=[VARIANT_ESP32C6, VARIANT_ESP32H2]),
|
||||
_require_vfs_select,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -28,6 +28,8 @@ from esphome.const import (
|
||||
CONF_ON_RAW_VALUE,
|
||||
CONF_ON_VALUE,
|
||||
CONF_ON_VALUE_RANGE,
|
||||
CONF_OPTIMISTIC,
|
||||
CONF_PERIOD,
|
||||
CONF_QUANTILE,
|
||||
CONF_SEND_EVERY,
|
||||
CONF_SEND_FIRST_AT,
|
||||
@@ -644,10 +646,29 @@ async def throttle_with_priority_filter_to_code(config, filter_id):
|
||||
return cg.new_Pvariable(filter_id, config[CONF_TIMEOUT], template_)
|
||||
|
||||
|
||||
HEARTBEAT_SCHEMA = cv.Schema(
|
||||
{
|
||||
cv.Required(CONF_PERIOD): cv.positive_time_period_milliseconds,
|
||||
cv.Optional(CONF_OPTIMISTIC, default=False): cv.boolean,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@FILTER_REGISTRY.register(
|
||||
"heartbeat", HeartbeatFilter, cv.positive_time_period_milliseconds
|
||||
"heartbeat",
|
||||
HeartbeatFilter,
|
||||
cv.Any(
|
||||
cv.positive_time_period_milliseconds,
|
||||
HEARTBEAT_SCHEMA,
|
||||
),
|
||||
)
|
||||
async def heartbeat_filter_to_code(config, filter_id):
|
||||
if isinstance(config, dict):
|
||||
var = cg.new_Pvariable(filter_id, config[CONF_PERIOD])
|
||||
await cg.register_component(var, {})
|
||||
cg.add(var.set_optimistic(config[CONF_OPTIMISTIC]))
|
||||
return var
|
||||
|
||||
var = cg.new_Pvariable(filter_id, config)
|
||||
await cg.register_component(var, {})
|
||||
return var
|
||||
|
||||
@@ -313,7 +313,7 @@ optional<float> DeltaFilter::new_value(float value) {
|
||||
}
|
||||
|
||||
// OrFilter
|
||||
OrFilter::OrFilter(std::vector<Filter *> filters) : filters_(std::move(filters)), phi_(this) {}
|
||||
OrFilter::OrFilter(std::initializer_list<Filter *> filters) : filters_(filters), phi_(this) {}
|
||||
OrFilter::PhiNode::PhiNode(OrFilter *or_parent) : or_parent_(or_parent) {}
|
||||
|
||||
optional<float> OrFilter::PhiNode::new_value(float value) {
|
||||
@@ -326,14 +326,14 @@ optional<float> OrFilter::PhiNode::new_value(float value) {
|
||||
}
|
||||
optional<float> OrFilter::new_value(float value) {
|
||||
this->has_value_ = false;
|
||||
for (Filter *filter : this->filters_)
|
||||
for (auto *filter : this->filters_)
|
||||
filter->input(value);
|
||||
|
||||
return {};
|
||||
}
|
||||
void OrFilter::initialize(Sensor *parent, Filter *next) {
|
||||
Filter::initialize(parent, next);
|
||||
for (Filter *filter : this->filters_) {
|
||||
for (auto *filter : this->filters_) {
|
||||
filter->initialize(parent, &this->phi_);
|
||||
}
|
||||
this->phi_.initialize(parent, nullptr);
|
||||
@@ -372,8 +372,12 @@ optional<float> HeartbeatFilter::new_value(float value) {
|
||||
this->last_input_ = value;
|
||||
this->has_value_ = true;
|
||||
|
||||
if (this->optimistic_) {
|
||||
return value;
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
void HeartbeatFilter::setup() {
|
||||
this->set_interval("heartbeat", this->time_period_, [this]() {
|
||||
ESP_LOGVV(TAG, "HeartbeatFilter(%p)::interval(has_value=%s, last_input=%f)", this, YESNO(this->has_value_),
|
||||
@@ -384,20 +388,27 @@ void HeartbeatFilter::setup() {
|
||||
this->output(this->last_input_);
|
||||
});
|
||||
}
|
||||
|
||||
float HeartbeatFilter::get_setup_priority() const { return setup_priority::HARDWARE; }
|
||||
|
||||
CalibrateLinearFilter::CalibrateLinearFilter(std::initializer_list<std::array<float, 3>> linear_functions)
|
||||
: linear_functions_(linear_functions) {}
|
||||
|
||||
optional<float> CalibrateLinearFilter::new_value(float value) {
|
||||
for (std::array<float, 3> f : this->linear_functions_) {
|
||||
for (const auto &f : this->linear_functions_) {
|
||||
if (!std::isfinite(f[2]) || value < f[2])
|
||||
return (value * f[0]) + f[1];
|
||||
}
|
||||
return NAN;
|
||||
}
|
||||
|
||||
CalibratePolynomialFilter::CalibratePolynomialFilter(std::initializer_list<float> coefficients)
|
||||
: coefficients_(coefficients) {}
|
||||
|
||||
optional<float> CalibratePolynomialFilter::new_value(float value) {
|
||||
float res = 0.0f;
|
||||
float x = 1.0f;
|
||||
for (float coefficient : this->coefficients_) {
|
||||
for (const auto &coefficient : this->coefficients_) {
|
||||
res += x * coefficient;
|
||||
x *= value;
|
||||
}
|
||||
|
||||
@@ -396,15 +396,16 @@ class HeartbeatFilter : public Filter, public Component {
|
||||
explicit HeartbeatFilter(uint32_t time_period);
|
||||
|
||||
void setup() override;
|
||||
|
||||
optional<float> new_value(float value) override;
|
||||
|
||||
float get_setup_priority() const override;
|
||||
|
||||
void set_optimistic(bool optimistic) { this->optimistic_ = optimistic; }
|
||||
|
||||
protected:
|
||||
uint32_t time_period_;
|
||||
float last_input_;
|
||||
bool has_value_{false};
|
||||
bool optimistic_{false};
|
||||
};
|
||||
|
||||
class DeltaFilter : public Filter {
|
||||
@@ -422,7 +423,7 @@ class DeltaFilter : public Filter {
|
||||
|
||||
class OrFilter : public Filter {
|
||||
public:
|
||||
explicit OrFilter(std::vector<Filter *> filters);
|
||||
explicit OrFilter(std::initializer_list<Filter *> filters);
|
||||
|
||||
void initialize(Sensor *parent, Filter *next) override;
|
||||
|
||||
@@ -438,28 +439,27 @@ class OrFilter : public Filter {
|
||||
OrFilter *or_parent_;
|
||||
};
|
||||
|
||||
std::vector<Filter *> filters_;
|
||||
FixedVector<Filter *> filters_;
|
||||
PhiNode phi_;
|
||||
bool has_value_{false};
|
||||
};
|
||||
|
||||
class CalibrateLinearFilter : public Filter {
|
||||
public:
|
||||
CalibrateLinearFilter(std::vector<std::array<float, 3>> linear_functions)
|
||||
: linear_functions_(std::move(linear_functions)) {}
|
||||
explicit CalibrateLinearFilter(std::initializer_list<std::array<float, 3>> linear_functions);
|
||||
optional<float> new_value(float value) override;
|
||||
|
||||
protected:
|
||||
std::vector<std::array<float, 3>> linear_functions_;
|
||||
FixedVector<std::array<float, 3>> linear_functions_;
|
||||
};
|
||||
|
||||
class CalibratePolynomialFilter : public Filter {
|
||||
public:
|
||||
CalibratePolynomialFilter(std::vector<float> coefficients) : coefficients_(std::move(coefficients)) {}
|
||||
explicit CalibratePolynomialFilter(std::initializer_list<float> coefficients);
|
||||
optional<float> new_value(float value) override;
|
||||
|
||||
protected:
|
||||
std::vector<float> coefficients_;
|
||||
FixedVector<float> coefficients_;
|
||||
};
|
||||
|
||||
class ClampFilter : public Filter {
|
||||
|
||||
@@ -110,17 +110,28 @@ def validate_mapping(value):
|
||||
"substitute", SubstituteFilter, cv.ensure_list(validate_mapping)
|
||||
)
|
||||
async def substitute_filter_to_code(config, filter_id):
|
||||
from_strings = [conf[CONF_FROM] for conf in config]
|
||||
to_strings = [conf[CONF_TO] for conf in config]
|
||||
return cg.new_Pvariable(filter_id, from_strings, to_strings)
|
||||
substitutions = [
|
||||
cg.StructInitializer(
|
||||
cg.MockObj("Substitution", "esphome::text_sensor::"),
|
||||
("from", conf[CONF_FROM]),
|
||||
("to", conf[CONF_TO]),
|
||||
)
|
||||
for conf in config
|
||||
]
|
||||
return cg.new_Pvariable(filter_id, substitutions)
|
||||
|
||||
|
||||
@FILTER_REGISTRY.register("map", MapFilter, cv.ensure_list(validate_mapping))
|
||||
async def map_filter_to_code(config, filter_id):
|
||||
map_ = cg.std_ns.class_("map").template(cg.std_string, cg.std_string)
|
||||
return cg.new_Pvariable(
|
||||
filter_id, map_([(item[CONF_FROM], item[CONF_TO]) for item in config])
|
||||
)
|
||||
mappings = [
|
||||
cg.StructInitializer(
|
||||
cg.MockObj("Substitution", "esphome::text_sensor::"),
|
||||
("from", conf[CONF_FROM]),
|
||||
("to", conf[CONF_TO]),
|
||||
)
|
||||
for conf in config
|
||||
]
|
||||
return cg.new_Pvariable(filter_id, mappings)
|
||||
|
||||
|
||||
validate_device_class = cv.one_of(*DEVICE_CLASSES, lower=True, space="_")
|
||||
|
||||
@@ -62,19 +62,27 @@ optional<std::string> AppendFilter::new_value(std::string value) { return value
optional<std::string> PrependFilter::new_value(std::string value) { return this->prefix_ + value; }

// Substitute
SubstituteFilter::SubstituteFilter(const std::initializer_list<Substitution> &substitutions)
    : substitutions_(substitutions) {}

optional<std::string> SubstituteFilter::new_value(std::string value) {
  std::size_t pos;
  for (size_t i = 0; i < this->from_strings_.size(); i++) {
    while ((pos = value.find(this->from_strings_[i])) != std::string::npos)
      value.replace(pos, this->from_strings_[i].size(), this->to_strings_[i]);
  for (const auto &sub : this->substitutions_) {
    while ((pos = value.find(sub.from)) != std::string::npos)
      value.replace(pos, sub.from.size(), sub.to);
  }
  return value;
}

// Map
MapFilter::MapFilter(const std::initializer_list<Substitution> &mappings) : mappings_(mappings) {}

optional<std::string> MapFilter::new_value(std::string value) {
  auto item = mappings_.find(value);
  return item == mappings_.end() ? value : item->second;
  for (const auto &mapping : this->mappings_) {
    if (mapping.from == value)
      return mapping.to;
  }
  return value;  // Pass through if no match
}

} // namespace text_sensor

@@ -2,10 +2,6 @@

#include "esphome/core/component.h"
#include "esphome/core/helpers.h"
#include <queue>
#include <utility>
#include <map>
#include <vector>

namespace esphome {
namespace text_sensor {
@@ -98,26 +94,52 @@ class PrependFilter : public Filter {
  std::string prefix_;
};

struct Substitution {
  std::string from;
  std::string to;
};

/// A simple filter that replaces a substring with another substring
class SubstituteFilter : public Filter {
 public:
  SubstituteFilter(std::vector<std::string> from_strings, std::vector<std::string> to_strings)
      : from_strings_(std::move(from_strings)), to_strings_(std::move(to_strings)) {}
  explicit SubstituteFilter(const std::initializer_list<Substitution> &substitutions);
  optional<std::string> new_value(std::string value) override;

 protected:
  std::vector<std::string> from_strings_;
  std::vector<std::string> to_strings_;
  FixedVector<Substitution> substitutions_;
};

/// A filter that maps values from one set to another
/** A filter that maps values from one set to another
 *
 * Uses linear search instead of std::map for typical small datasets (2-20 mappings).
 * Linear search on contiguous memory is faster than red-black tree lookups when:
 * - Dataset is small (< ~30 items)
 * - Memory is contiguous (cache-friendly, better CPU cache utilization)
 * - No pointer chasing overhead (tree node traversal)
 * - String comparison cost dominates lookup time
 *
 * Benchmark results (see benchmark_map_filter.cpp):
 * - 2 mappings: Linear 1.26x faster than std::map
 * - 5 mappings: Linear 2.25x faster than std::map
 * - 10 mappings: Linear 1.83x faster than std::map
 * - 20 mappings: Linear 1.59x faster than std::map
 * - 30 mappings: Linear 1.09x faster than std::map
 * - 40 mappings: std::map 1.27x faster than Linear (break-even)
 *
 * Benefits over std::map:
 * - ~2KB smaller flash (no red-black tree code)
 * - ~24-32 bytes less RAM per mapping (no tree node overhead)
 * - Faster for typical ESPHome usage (2-10 mappings common, 20+ rare)
 *
 * Break-even point: ~35-40 mappings, but ESPHome configs rarely exceed 20
 */
class MapFilter : public Filter {
 public:
  MapFilter(std::map<std::string, std::string> mappings) : mappings_(std::move(mappings)) {}
  explicit MapFilter(const std::initializer_list<Substitution> &mappings);
  optional<std::string> new_value(std::string value) override;

 protected:
  std::map<std::string, std::string> mappings_;
  FixedVector<Substitution> mappings_;
};
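
For context on the linear-search trade-off documented above, here is a minimal standalone sketch (editorial illustration, not part of the diff; it stands in `std::vector` for ESPHome's `FixedVector` and a free function for the filter method) showing the lookup the new MapFilter performs:

#include <cstdio>
#include <string>
#include <vector>

// Stand-in for the Substitution struct from the diff.
struct Substitution {
  std::string from;
  std::string to;
};

// Linear lookup over contiguous storage, mirroring MapFilter::new_value() above.
std::string map_lookup(const std::vector<Substitution> &mappings, const std::string &value) {
  for (const auto &mapping : mappings) {
    if (mapping.from == value)
      return mapping.to;
  }
  return value;  // pass through unchanged if no match
}

int main() {
  std::vector<Substitution> mappings = {{"ON", "Running"}, {"OFF", "Idle"}};
  std::printf("%s\n", map_lookup(mappings, "ON").c_str());       // Running
  std::printf("%s\n", map_lookup(mappings, "UNKNOWN").c_str());  // UNKNOWN (pass-through)
  return 0;
}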

} // namespace text_sensor

||||
@@ -56,6 +56,13 @@ uint32_t ESP8266UartComponent::get_config() {
|
||||
}
|
||||
|
||||
void ESP8266UartComponent::setup() {
|
||||
if (this->rx_pin_) {
|
||||
this->rx_pin_->setup();
|
||||
}
|
||||
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
|
||||
this->tx_pin_->setup();
|
||||
}
|
||||
|
||||
// Use Arduino HardwareSerial UARTs if all used pins match the ones
|
||||
// preconfigured by the platform. For example if RX disabled but TX pin
|
||||
// is 1 we still want to use Serial.
|
||||
|
||||
@@ -6,6 +6,9 @@
|
||||
#include "esphome/core/defines.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/gpio.h"
|
||||
#include "driver/gpio.h"
|
||||
#include "soc/gpio_num.h"
|
||||
|
||||
#ifdef USE_LOGGER
|
||||
#include "esphome/components/logger/logger.h"
|
||||
@@ -104,6 +107,13 @@ void IDFUARTComponent::load_settings(bool dump_config) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this->rx_pin_) {
|
||||
this->rx_pin_->setup();
|
||||
}
|
||||
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
|
||||
this->tx_pin_->setup();
|
||||
}
|
||||
|
||||
int8_t tx = this->tx_pin_ != nullptr ? this->tx_pin_->get_pin() : -1;
|
||||
int8_t rx = this->rx_pin_ != nullptr ? this->rx_pin_->get_pin() : -1;
|
||||
int8_t flow_control = this->flow_control_pin_ != nullptr ? this->flow_control_pin_->get_pin() : -1;
|
||||
|
||||
@@ -46,6 +46,13 @@ uint16_t LibreTinyUARTComponent::get_config() {
|
||||
}
|
||||
|
||||
void LibreTinyUARTComponent::setup() {
|
||||
if (this->rx_pin_) {
|
||||
this->rx_pin_->setup();
|
||||
}
|
||||
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
|
||||
this->tx_pin_->setup();
|
||||
}
|
||||
|
||||
int8_t tx_pin = tx_pin_ == nullptr ? -1 : tx_pin_->get_pin();
|
||||
int8_t rx_pin = rx_pin_ == nullptr ? -1 : rx_pin_->get_pin();
|
||||
bool tx_inverted = tx_pin_ != nullptr && tx_pin_->is_inverted();
|
||||
|
||||
@@ -52,6 +52,13 @@ uint16_t RP2040UartComponent::get_config() {
|
||||
}
|
||||
|
||||
void RP2040UartComponent::setup() {
|
||||
if (this->rx_pin_) {
|
||||
this->rx_pin_->setup();
|
||||
}
|
||||
if (this->tx_pin_ && this->rx_pin_ != this->tx_pin_) {
|
||||
this->tx_pin_->setup();
|
||||
}
|
||||
|
||||
uint16_t config = get_config();
|
||||
|
||||
constexpr uint32_t valid_tx_uart_0 = __bitset({0, 12, 16, 28});
|
||||
|
||||
@@ -378,14 +378,19 @@ async def to_code(config):
|
||||
# Track if any network uses Enterprise authentication
|
||||
has_eap = False
|
||||
|
||||
def add_sta(ap, network):
|
||||
ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP))
|
||||
cg.add(var.add_sta(wifi_network(network, ap, ip_config)))
|
||||
# Initialize FixedVector with the count of networks
|
||||
networks = config.get(CONF_NETWORKS, [])
|
||||
if networks:
|
||||
cg.add(var.init_sta(len(networks)))
|
||||
|
||||
for network in config.get(CONF_NETWORKS, []):
|
||||
if CONF_EAP in network:
|
||||
has_eap = True
|
||||
cg.with_local_variable(network[CONF_ID], WiFiAP(), add_sta, network)
|
||||
def add_sta(ap: cg.MockObj, network: dict) -> None:
|
||||
ip_config = network.get(CONF_MANUAL_IP, config.get(CONF_MANUAL_IP))
|
||||
cg.add(var.add_sta(wifi_network(network, ap, ip_config)))
|
||||
|
||||
for network in networks:
|
||||
if CONF_EAP in network:
|
||||
has_eap = True
|
||||
cg.with_local_variable(network[CONF_ID], WiFiAP(), add_sta, network)
|
||||
|
||||
if CONF_AP in config:
|
||||
conf = config[CONF_AP]
|
||||
|
||||
@@ -330,9 +330,11 @@ float WiFiComponent::get_loop_priority() const {
|
||||
return 10.0f; // before other loop components
|
||||
}
|
||||
|
||||
void WiFiComponent::init_sta(size_t count) { this->sta_.init(count); }
|
||||
void WiFiComponent::add_sta(const WiFiAP &ap) { this->sta_.push_back(ap); }
|
||||
void WiFiComponent::set_sta(const WiFiAP &ap) {
|
||||
this->clear_sta();
|
||||
this->init_sta(1);
|
||||
this->add_sta(ap);
|
||||
}
|
||||
void WiFiComponent::clear_sta() { this->sta_.clear(); }
|
||||
|
||||
@@ -219,6 +219,7 @@ class WiFiComponent : public Component {
|
||||
|
||||
void set_sta(const WiFiAP &ap);
|
||||
WiFiAP get_sta() { return this->selected_ap_; }
|
||||
void init_sta(size_t count);
|
||||
void add_sta(const WiFiAP &ap);
|
||||
void clear_sta();
|
||||
|
||||
@@ -393,7 +394,7 @@ class WiFiComponent : public Component {
|
||||
#endif
|
||||
|
||||
std::string use_address_;
|
||||
std::vector<WiFiAP> sta_;
|
||||
FixedVector<WiFiAP> sta_;
|
||||
std::vector<WiFiSTAPriority> sta_priorities_;
|
||||
wifi_scan_vector_t<WiFiScanResult> scan_result_;
|
||||
WiFiAP selected_ap_;
|
||||
|
||||
@@ -471,6 +471,7 @@ CONF_IMPORT_REACTIVE_ENERGY = "import_reactive_energy"
|
||||
CONF_INC_PIN = "inc_pin"
|
||||
CONF_INCLUDE_INTERNAL = "include_internal"
|
||||
CONF_INCLUDES = "includes"
|
||||
CONF_INCLUDES_C = "includes_c"
|
||||
CONF_INDEX = "index"
|
||||
CONF_INDOOR = "indoor"
|
||||
CONF_INFRARED = "infrared"
|
||||
|
||||
@@ -243,7 +243,7 @@ template<typename... Ts> class ActionList {
|
||||
}
|
||||
this->actions_end_ = action;
|
||||
}
|
||||
void add_actions(const std::vector<Action<Ts...> *> &actions) {
|
||||
void add_actions(const std::initializer_list<Action<Ts...> *> &actions) {
|
||||
for (auto *action : actions) {
|
||||
this->add_action(action);
|
||||
}
|
||||
@@ -286,7 +286,7 @@ template<typename... Ts> class Automation {
|
||||
explicit Automation(Trigger<Ts...> *trigger) : trigger_(trigger) { this->trigger_->set_automation_parent(this); }
|
||||
|
||||
void add_action(Action<Ts...> *action) { this->actions_.add_action(action); }
|
||||
void add_actions(const std::vector<Action<Ts...> *> &actions) { this->actions_.add_actions(actions); }
|
||||
void add_actions(const std::initializer_list<Action<Ts...> *> &actions) { this->actions_.add_actions(actions); }
|
||||
|
||||
void stop() { this->actions_.stop(); }
|
||||
|
||||
|
||||
@@ -194,12 +194,12 @@ template<typename... Ts> class IfAction : public Action<Ts...> {
|
||||
public:
|
||||
explicit IfAction(Condition<Ts...> *condition) : condition_(condition) {}
|
||||
|
||||
void add_then(const std::vector<Action<Ts...> *> &actions) {
|
||||
void add_then(const std::initializer_list<Action<Ts...> *> &actions) {
|
||||
this->then_.add_actions(actions);
|
||||
this->then_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
|
||||
}
|
||||
|
||||
void add_else(const std::vector<Action<Ts...> *> &actions) {
|
||||
void add_else(const std::initializer_list<Action<Ts...> *> &actions) {
|
||||
this->else_.add_actions(actions);
|
||||
this->else_.add_action(new LambdaAction<Ts...>([this](Ts... x) { this->play_next_(x...); }));
|
||||
}
|
||||
@@ -240,7 +240,7 @@ template<typename... Ts> class WhileAction : public Action<Ts...> {
|
||||
public:
|
||||
WhileAction(Condition<Ts...> *condition) : condition_(condition) {}
|
||||
|
||||
void add_then(const std::vector<Action<Ts...> *> &actions) {
|
||||
void add_then(const std::initializer_list<Action<Ts...> *> &actions) {
|
||||
this->then_.add_actions(actions);
|
||||
this->then_.add_action(new LambdaAction<Ts...>([this](Ts... x) {
|
||||
if (this->num_running_ > 0 && this->condition_->check_tuple(this->var_)) {
|
||||
@@ -287,7 +287,7 @@ template<typename... Ts> class RepeatAction : public Action<Ts...> {
|
||||
public:
|
||||
TEMPLATABLE_VALUE(uint32_t, count)
|
||||
|
||||
void add_then(const std::vector<Action<uint32_t, Ts...> *> &actions) {
|
||||
void add_then(const std::initializer_list<Action<uint32_t, Ts...> *> &actions) {
|
||||
this->then_.add_actions(actions);
|
||||
this->then_.add_action(new LambdaAction<uint32_t, Ts...>([this](uint32_t iteration, Ts... x) {
|
||||
iteration++;
|
||||
|
||||
@@ -14,6 +14,15 @@ inline static constexpr uint8_t esp_scale8(uint8_t i, uint8_t scale) {
  return (uint16_t(i) * (1 + uint16_t(scale))) / 256;
}

/// Scale an 8-bit value by two 8-bit scale factors with improved precision.
/// This is more accurate than calling esp_scale8() twice because it delays
/// truncation until after both multiplications, preserving intermediate precision.
/// For example: esp_scale8_twice(value, max_brightness, local_brightness)
/// gives better results than esp_scale8(esp_scale8(value, max_brightness), local_brightness)
inline static constexpr uint8_t esp_scale8_twice(uint8_t i, uint8_t scale1, uint8_t scale2) {
  return (uint32_t(i) * (1 + uint32_t(scale1)) * (1 + uint32_t(scale2))) >> 16;
}
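
To make the precision note above concrete, a small standalone comparison (editorial illustration, not part of the diff) with arbitrary sample values chosen so that the two approaches round differently:

#include <cstdint>
#include <cstdio>

// Helpers copied from the diff for comparison.
inline static constexpr uint8_t esp_scale8(uint8_t i, uint8_t scale) {
  return (uint16_t(i) * (1 + uint16_t(scale))) / 256;
}
inline static constexpr uint8_t esp_scale8_twice(uint8_t i, uint8_t scale1, uint8_t scale2) {
  return (uint32_t(i) * (1 + uint32_t(scale1)) * (1 + uint32_t(scale2))) >> 16;
}

int main() {
  // Sample values where intermediate truncation matters: exact result is 7 * 101/256 * 251/256 ~= 2.71.
  const uint8_t value = 7, max_brightness = 100, local_brightness = 250;
  uint8_t nested = esp_scale8(esp_scale8(value, max_brightness), local_brightness);  // truncates twice -> 1
  uint8_t single = esp_scale8_twice(value, max_brightness, local_brightness);        // truncates once  -> 2
  std::printf("nested=%d single=%d (exact ~2.71)\n", (int) nested, (int) single);
  return 0;
}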

struct Color {
  union {
    struct {

@@ -21,6 +21,7 @@ from esphome.const import (
|
||||
CONF_FRIENDLY_NAME,
|
||||
CONF_ID,
|
||||
CONF_INCLUDES,
|
||||
CONF_INCLUDES_C,
|
||||
CONF_LIBRARIES,
|
||||
CONF_MIN_VERSION,
|
||||
CONF_NAME,
|
||||
@@ -227,6 +228,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
}
|
||||
),
|
||||
cv.Optional(CONF_INCLUDES, default=[]): cv.ensure_list(valid_include),
|
||||
cv.Optional(CONF_INCLUDES_C, default=[]): cv.ensure_list(valid_include),
|
||||
cv.Optional(CONF_LIBRARIES, default=[]): cv.ensure_list(cv.string_strict),
|
||||
cv.Optional(CONF_NAME_ADD_MAC_SUFFIX, default=False): cv.boolean,
|
||||
cv.Optional(CONF_DEBUG_SCHEDULER, default=False): cv.boolean,
|
||||
@@ -302,6 +304,17 @@ def _list_target_platforms():
|
||||
return target_platforms
|
||||
|
||||
|
||||
def _sort_includes_by_type(includes: list[str]) -> tuple[list[str], list[str]]:
|
||||
system_includes = []
|
||||
other_includes = []
|
||||
for include in includes:
|
||||
if include.startswith("<") and include.endswith(">"):
|
||||
system_includes.append(include)
|
||||
else:
|
||||
other_includes.append(include)
|
||||
return system_includes, other_includes
|
||||
|
||||
|
||||
def preload_core_config(config, result) -> str:
|
||||
with cv.prepend_path(CONF_ESPHOME):
|
||||
conf = PRELOAD_CONFIG_SCHEMA(config[CONF_ESPHOME])
|
||||
@@ -339,7 +352,7 @@ def preload_core_config(config, result) -> str:
|
||||
return target_platforms[0]
|
||||
|
||||
|
||||
def include_file(path: Path, basename: Path):
|
||||
def include_file(path: Path, basename: Path, is_c_header: bool = False):
|
||||
parts = basename.parts
|
||||
dst = CORE.relative_src_path(*parts)
|
||||
copy_file_if_changed(path, dst)
|
||||
@@ -347,7 +360,14 @@ def include_file(path: Path, basename: Path):
|
||||
ext = path.suffix
|
||||
if ext in [".h", ".hpp", ".tcc"]:
|
||||
# Header, add include statement
|
||||
cg.add_global(cg.RawStatement(f'#include "{basename}"'))
|
||||
if is_c_header:
|
||||
# Wrap in extern "C" block for C headers
|
||||
cg.add_global(
|
||||
cg.RawStatement(f'extern "C" {{\n #include "{basename}"\n}}')
|
||||
)
|
||||
else:
|
||||
# Regular include
|
||||
cg.add_global(cg.RawStatement(f'#include "{basename}"'))
|
||||
|
||||
|
||||
ARDUINO_GLUE_CODE = """\
|
||||
@@ -377,7 +397,7 @@ async def add_arduino_global_workaround():
|
||||
|
||||
|
||||
@coroutine_with_priority(CoroPriority.FINAL)
|
||||
async def add_includes(includes: list[str]) -> None:
|
||||
async def add_includes(includes: list[str], is_c_header: bool = False) -> None:
|
||||
# Add includes at the very end, so that the included files can access global variables
|
||||
for include in includes:
|
||||
path = CORE.relative_config_path(include)
|
||||
@@ -385,11 +405,11 @@ async def add_includes(includes: list[str]) -> None:
|
||||
# Directory, copy tree
|
||||
for p in walk_files(path):
|
||||
basename = p.relative_to(path.parent)
|
||||
include_file(p, basename)
|
||||
include_file(p, basename, is_c_header)
|
||||
else:
|
||||
# Copy file
|
||||
basename = Path(path.name)
|
||||
include_file(path, basename)
|
||||
include_file(path, basename, is_c_header)
|
||||
|
||||
|
||||
@coroutine_with_priority(CoroPriority.FINAL)
|
||||
@@ -494,19 +514,25 @@ async def to_code(config: ConfigType) -> None:
|
||||
CORE.add_job(add_arduino_global_workaround)
|
||||
|
||||
if config[CONF_INCLUDES]:
|
||||
# Get the <...> includes
|
||||
system_includes = []
|
||||
other_includes = []
|
||||
for include in config[CONF_INCLUDES]:
|
||||
if include.startswith("<") and include.endswith(">"):
|
||||
system_includes.append(include)
|
||||
else:
|
||||
other_includes.append(include)
|
||||
system_includes, other_includes = _sort_includes_by_type(config[CONF_INCLUDES])
|
||||
# <...> includes should be at the start
|
||||
for include in system_includes:
|
||||
cg.add_global(cg.RawStatement(f"#include {include}"), prepend=True)
|
||||
# Other includes should be at the end
|
||||
CORE.add_job(add_includes, other_includes)
|
||||
CORE.add_job(add_includes, other_includes, False)
|
||||
|
||||
if config[CONF_INCLUDES_C]:
|
||||
system_includes, other_includes = _sort_includes_by_type(
|
||||
config[CONF_INCLUDES_C]
|
||||
)
|
||||
# <...> includes should be at the start
|
||||
for include in system_includes:
|
||||
cg.add_global(
|
||||
cg.RawStatement(f'extern "C" {{\n #include {include}\n}}'),
|
||||
prepend=True,
|
||||
)
|
||||
# Other includes should be at the end
|
||||
CORE.add_job(add_includes, other_includes, True)
|
||||
|
||||
if project_conf := config.get(CONF_PROJECT):
|
||||
cg.add_define("ESPHOME_PROJECT_NAME", project_conf[CONF_NAME])
|
||||
|
||||
@@ -44,6 +44,7 @@
|
||||
#define USE_GRAPHICAL_DISPLAY_MENU
|
||||
#define USE_HOMEASSISTANT_TIME
|
||||
#define USE_HTTP_REQUEST_OTA_WATCHDOG_TIMEOUT 8000 // NOLINT
|
||||
#define USE_IMPROV_SERIAL_NEXT_URL
|
||||
#define USE_JSON
|
||||
#define USE_LIGHT
|
||||
#define USE_LOCK
|
||||
@@ -186,6 +187,7 @@
|
||||
#define USE_ESP32_CAMERA_JPEG_ENCODER
|
||||
#define USE_I2C
|
||||
#define USE_IMPROV
|
||||
#define USE_ESP32_IMPROV_NEXT_URL
|
||||
#define USE_MICROPHONE
|
||||
#define USE_PSRAM
|
||||
#define USE_SOCKET_IMPL_BSD_SOCKETS
|
||||
@@ -241,8 +243,10 @@
|
||||
// Dummy firmware payload for shelly_dimmer
|
||||
#define USE_SHD_FIRMWARE_MAJOR_VERSION 56
|
||||
#define USE_SHD_FIRMWARE_MINOR_VERSION 5
|
||||
// clang-format off
|
||||
#define USE_SHD_FIRMWARE_DATA \
|
||||
{}
|
||||
// clang-format on
|
||||
|
||||
#define USE_WEBSERVER
|
||||
#define USE_WEBSERVER_AUTH
|
||||
|
||||
@@ -194,12 +194,8 @@ template<typename T> class FixedVector {
    size_ = 0;
  }

 public:
  FixedVector() = default;

  /// Constructor from initializer list - allocates exact size needed
  /// This enables brace initialization: FixedVector<int> v = {1, 2, 3};
  FixedVector(std::initializer_list<T> init_list) {
  // Helper to assign from initializer list (shared by constructor and assignment operator)
  void assign_from_initializer_list_(std::initializer_list<T> init_list) {
    init(init_list.size());
    size_t idx = 0;
    for (const auto &item : init_list) {
@@ -209,6 +205,13 @@ template<typename T> class FixedVector {
    size_ = init_list.size();
  }

 public:
  FixedVector() = default;

  /// Constructor from initializer list - allocates exact size needed
  /// This enables brace initialization: FixedVector<int> v = {1, 2, 3};
  FixedVector(std::initializer_list<T> init_list) { assign_from_initializer_list_(init_list); }

  ~FixedVector() { cleanup_(); }

  // Disable copy operations (avoid accidental expensive copies)
@@ -234,6 +237,15 @@ template<typename T> class FixedVector {
    return *this;
  }

  /// Assignment from initializer list - avoids temporary and move overhead
  /// This enables: FixedVector<int> v; v = {1, 2, 3};
  FixedVector &operator=(std::initializer_list<T> init_list) {
    cleanup_();
    reset_();
    assign_from_initializer_list_(init_list);
    return *this;
  }
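
To show how the two initializer-list paths added above are meant to be used, here is a compressed standalone stand-in (editorial illustration only; `TinyFixedVector` is a made-up name, not ESPHome's real FixedVector, which manages raw storage via init()/cleanup_()):

#include <cstddef>
#include <cstdio>
#include <initializer_list>

// Greatly simplified stand-in showing brace construction and brace assignment.
template<typename T> class TinyFixedVector {
 public:
  TinyFixedVector() = default;
  TinyFixedVector(std::initializer_list<T> init_list) { assign_(init_list); }
  TinyFixedVector &operator=(std::initializer_list<T> init_list) {
    delete[] data_;        // drop the old allocation, like cleanup_()/reset_() in the diff
    assign_(init_list);
    return *this;
  }
  ~TinyFixedVector() { delete[] data_; }
  TinyFixedVector(const TinyFixedVector &) = delete;             // copies disabled, like the original
  TinyFixedVector &operator=(const TinyFixedVector &) = delete;
  std::size_t size() const { return size_; }
  const T &operator[](std::size_t i) const { return data_[i]; }

 private:
  void assign_(std::initializer_list<T> init_list) {
    size_ = init_list.size();
    data_ = new T[size_];
    std::size_t idx = 0;
    for (const auto &item : init_list)
      data_[idx++] = item;
  }
  T *data_{nullptr};
  std::size_t size_{0};
};

int main() {
  TinyFixedVector<int> v = {1, 2, 3};  // brace construction
  v = {4, 5};                          // brace assignment, reallocates to the exact new size
  std::printf("size=%zu first=%d\n", v.size(), v[0]);  // size=2 first=4
  return 0;
}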

  // Allocate capacity - can be called multiple times to reinit
  void init(size_t n) {
    cleanup_();

@@ -46,6 +46,10 @@ lib_deps =
|
||||
; This is using the repository until a new release is published to PlatformIO
|
||||
https://github.com/Sensirion/arduino-gas-index-algorithm.git#3.2.1 ; Sensirion Gas Index Algorithm Arduino Library
|
||||
lvgl/lvgl@8.4.0 ; lvgl
|
||||
; This dependency is used only in unit tests.
|
||||
; Must coincide with PLATFORMIO_GOOGLE_TEST_LIB in scripts/cpp_unit_test.py
|
||||
; See scripts/cpp_unit_test.py and tests/components/README.md
|
||||
google/googletest@^1.15.2
|
||||
build_flags =
|
||||
-DESPHOME_LOG_LEVEL=ESPHOME_LOG_LEVEL_VERY_VERBOSE
|
||||
-std=gnu++20
|
||||
|
||||
script/cpp_unit_test.py (new executable file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import hashlib
|
||||
import os
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from helpers import get_all_components, get_all_dependencies, root_path
|
||||
|
||||
from esphome.__main__ import command_compile, parse_args
|
||||
from esphome.config import validate_config
|
||||
from esphome.core import CORE
|
||||
from esphome.platformio_api import get_idedata
|
||||
|
||||
# This must coincide with the version in /platformio.ini
|
||||
PLATFORMIO_GOOGLE_TEST_LIB = "google/googletest@^1.15.2"
|
||||
|
||||
# Path to /tests/components
|
||||
COMPONENTS_TESTS_DIR: Path = Path(root_path) / "tests" / "components"
|
||||
|
||||
|
||||
def hash_components(components: list[str]) -> str:
|
||||
key = ",".join(components)
|
||||
return hashlib.sha256(key.encode()).hexdigest()[:16]
|
||||
|
||||
|
||||
def filter_components_without_tests(components: list[str]) -> list[str]:
|
||||
"""Filter out components that do not have a corresponding test file.
|
||||
|
||||
This is done by checking if the component's directory contains at
|
||||
least a .cpp file.
|
||||
"""
|
||||
filtered_components: list[str] = []
|
||||
for component in components:
|
||||
test_dir = COMPONENTS_TESTS_DIR / component
|
||||
if test_dir.is_dir() and any(test_dir.glob("*.cpp")):
|
||||
filtered_components.append(component)
|
||||
else:
|
||||
print(
|
||||
f"WARNING: No tests found for component '{component}', skipping.",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return filtered_components
|
||||
|
||||
|
||||
def create_test_config(config_name: str, includes: list[str]) -> dict:
|
||||
"""Create ESPHome test configuration for C++ unit tests.
|
||||
|
||||
Args:
|
||||
config_name: Unique name for this test configuration
|
||||
includes: List of include folders for the test build
|
||||
|
||||
Returns:
|
||||
Configuration dict for ESPHome
|
||||
"""
|
||||
return {
|
||||
"esphome": {
|
||||
"name": config_name,
|
||||
"friendly_name": "CPP Unit Tests",
|
||||
"libraries": PLATFORMIO_GOOGLE_TEST_LIB,
|
||||
"platformio_options": {
|
||||
"build_type": "debug",
|
||||
"build_unflags": [
|
||||
"-Os", # remove size-opt flag
|
||||
],
|
||||
"build_flags": [
|
||||
"-Og", # optimize for debug
|
||||
],
|
||||
"debug_build_flags": [ # only for debug builds
|
||||
"-g3", # max debug info
|
||||
"-ggdb3",
|
||||
],
|
||||
},
|
||||
"includes": includes,
|
||||
},
|
||||
"host": {},
|
||||
"logger": {"level": "DEBUG"},
|
||||
}
|
||||
|
||||
|
||||
def run_tests(selected_components: list[str]) -> int:
|
||||
# Skip tests on Windows
|
||||
if os.name == "nt":
|
||||
print("Skipping esphome tests on Windows", file=sys.stderr)
|
||||
return 1
|
||||
|
||||
# Remove components that do not have tests
|
||||
components = filter_components_without_tests(selected_components)
|
||||
|
||||
if len(components) == 0:
|
||||
print(
|
||||
"No components specified or no tests found for the specified components.",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return 0
|
||||
|
||||
components = sorted(components)
|
||||
|
||||
# Obtain possible dependencies for the requested components:
|
||||
components_with_dependencies = sorted(get_all_dependencies(set(components)))
|
||||
|
||||
# Build a list of include folders, one folder per component containing tests.
|
||||
# A special replacement main.cpp is located in /tests/components/main.cpp
|
||||
includes: list[str] = ["main.cpp"] + components
|
||||
|
||||
# Create a unique name for this config based on the actual components being tested
|
||||
# to maximize cache during testing
|
||||
config_name: str = "cpptests-" + hash_components(components)
|
||||
|
||||
config = create_test_config(config_name, includes)
|
||||
|
||||
CORE.config_path = COMPONENTS_TESTS_DIR / "dummy.yaml"
|
||||
CORE.dashboard = None
|
||||
|
||||
# Validate config will expand the above with defaults:
|
||||
config = validate_config(config, {})
|
||||
|
||||
# Add all components and dependencies to the base configuration after validation, so their files
|
||||
# are added to the build.
|
||||
config.update({key: {} for key in components_with_dependencies})
|
||||
|
||||
print(f"Testing components: {', '.join(components)}")
|
||||
CORE.config = config
|
||||
args = parse_args(["program", "compile", str(CORE.config_path)])
|
||||
try:
|
||||
exit_code: int = command_compile(args, config)
|
||||
|
||||
if exit_code != 0:
|
||||
print(f"Error compiling unit tests for {', '.join(components)}")
|
||||
return exit_code
|
||||
except Exception as e:
|
||||
print(
|
||||
f"Error compiling unit tests for {', '.join(components)}. Check path. : {e}"
|
||||
)
|
||||
return 2
|
||||
|
||||
# After a successful compilation, locate the executable and run it:
|
||||
idedata = get_idedata(config)
|
||||
if idedata is None:
|
||||
print("Cannot find executable")
|
||||
return 1
|
||||
|
||||
program_path: str = idedata.raw["prog_path"]
|
||||
run_cmd: list[str] = [program_path]
|
||||
run_proc = subprocess.run(run_cmd, check=False)
|
||||
return run_proc.returncode
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Run C++ unit tests for ESPHome components."
|
||||
)
|
||||
parser.add_argument(
|
||||
"components",
|
||||
nargs="*",
|
||||
help="List of components to test. Use --all to test all known components.",
|
||||
)
|
||||
parser.add_argument("--all", action="store_true", help="Test all known components.")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.all:
|
||||
components: list[str] = get_all_components()
|
||||
else:
|
||||
components: list[str] = args.components
|
||||
|
||||
sys.exit(run_tests(components))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -43,7 +43,6 @@ from enum import StrEnum
|
||||
from functools import cache
|
||||
import json
|
||||
import os
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Any
|
||||
@@ -53,10 +52,16 @@ from helpers import (
|
||||
CPP_FILE_EXTENSIONS,
|
||||
PYTHON_FILE_EXTENSIONS,
|
||||
changed_files,
|
||||
core_changed,
|
||||
filter_component_and_test_cpp_files,
|
||||
filter_component_and_test_files,
|
||||
get_all_dependencies,
|
||||
get_changed_components,
|
||||
get_component_from_path,
|
||||
get_component_test_files,
|
||||
get_components_from_integration_fixtures,
|
||||
get_components_with_dependencies,
|
||||
get_cpp_changed_components,
|
||||
git_ls_files,
|
||||
parse_test_filename,
|
||||
root_path,
|
||||
@@ -141,10 +146,9 @@ def should_run_integration_tests(branch: str | None = None) -> bool:
|
||||
"""
|
||||
files = changed_files(branch)
|
||||
|
||||
# Check if any core files changed (esphome/core/*)
|
||||
for file in files:
|
||||
if file.startswith("esphome/core/"):
|
||||
return True
|
||||
if core_changed(files):
|
||||
# If any core files changed, run integration tests
|
||||
return True
|
||||
|
||||
# Check if any integration test files changed
|
||||
if any("tests/integration" in file for file in files):
|
||||
@@ -281,6 +285,40 @@ def should_run_python_linters(branch: str | None = None) -> bool:
|
||||
return _any_changed_file_endswith(branch, PYTHON_FILE_EXTENSIONS)
|
||||
|
||||
|
||||
def determine_cpp_unit_tests(
|
||||
branch: str | None = None,
|
||||
) -> tuple[bool, list[str]]:
|
||||
"""Determine if C++ unit tests should run based on changed files.
|
||||
|
||||
This function is used by the CI workflow to skip C++ unit tests when
|
||||
no relevant files have changed, saving CI time and resources.
|
||||
|
||||
C++ unit tests will run when any of the following conditions are met:
|
||||
|
||||
1. Any C++ core source files changed (esphome/core/*), in which case
|
||||
all cpp unit tests run.
|
||||
2. A test file for a component changed, which triggers tests for that
|
||||
component.
|
||||
3. The code for a component changed, which triggers tests for that
|
||||
component and all components that depend on it.
|
||||
|
||||
Args:
|
||||
branch: Branch to compare against. If None, uses default.
|
||||
|
||||
Returns:
|
||||
Tuple of (run_all, components) where:
|
||||
- run_all: True if all tests should run, False otherwise
|
||||
- components: List of specific components to test (empty if run_all)
|
||||
"""
|
||||
files = changed_files(branch)
|
||||
if core_changed(files):
|
||||
return (True, [])
|
||||
|
||||
# Filter to only C++ files
|
||||
cpp_files = list(filter(filter_component_and_test_cpp_files, files))
|
||||
return (False, get_cpp_changed_components(cpp_files))
|
||||
|
||||
|
||||
def _any_changed_file_endswith(branch: str | None, extensions: tuple[str, ...]) -> bool:
|
||||
"""Check if a changed file ends with any of the specified extensions."""
|
||||
return any(file.endswith(extensions) for file in changed_files(branch))
|
||||
@@ -561,16 +599,31 @@ def main() -> None:
|
||||
run_python_linters = should_run_python_linters(args.branch)
|
||||
changed_cpp_file_count = count_changed_cpp_files(args.branch)
|
||||
|
||||
# Get both directly changed and all changed components (with dependencies) in one call
|
||||
script_path = Path(__file__).parent / "list-components.py"
|
||||
cmd = [sys.executable, str(script_path), "--changed-with-deps"]
|
||||
if args.branch:
|
||||
cmd.extend(["-b", args.branch])
|
||||
# Get changed components
|
||||
# get_changed_components() returns:
|
||||
# None: Core files changed (need full scan)
|
||||
# []: No components changed
|
||||
# [list]: Changed components (already includes dependencies)
|
||||
changed_components_result = get_changed_components()
|
||||
|
||||
result = subprocess.run(cmd, capture_output=True, text=True, check=True)
|
||||
component_data = json.loads(result.stdout)
|
||||
directly_changed_components = component_data["directly_changed"]
|
||||
changed_components = component_data["all_changed"]
|
||||
# Always analyze component files, even if core files changed
|
||||
# This is needed for component testing and memory impact analysis
|
||||
changed = changed_files(args.branch)
|
||||
component_files = [f for f in changed if filter_component_and_test_files(f)]
|
||||
|
||||
directly_changed_components = get_components_with_dependencies(
|
||||
component_files, False
|
||||
)
|
||||
|
||||
if changed_components_result is None:
|
||||
# Core files changed - will trigger full clang-tidy scan
|
||||
# But we still need to track changed components for testing and memory analysis
|
||||
changed_components = get_components_with_dependencies(component_files, True)
|
||||
is_core_change = True
|
||||
else:
|
||||
# Use the result from get_changed_components() which includes dependencies
|
||||
changed_components = changed_components_result
|
||||
is_core_change = False
|
||||
|
||||
# Filter to only components that have test files
|
||||
# Components without tests shouldn't generate CI test jobs
|
||||
@@ -581,11 +634,11 @@ def main() -> None:
|
||||
# Get directly changed components with tests (for isolated testing)
|
||||
# These will be tested WITHOUT --testing-mode in CI to enable full validation
|
||||
# (pin conflicts, etc.) since they contain the actual changes being reviewed
|
||||
directly_changed_with_tests = [
|
||||
directly_changed_with_tests = {
|
||||
component
|
||||
for component in directly_changed_components
|
||||
if _component_has_tests(component)
|
||||
]
|
||||
}
|
||||
|
||||
# Get dependency-only components (for grouped testing)
|
||||
dependency_only_components = [
|
||||
@@ -599,7 +652,8 @@ def main() -> None:
|
||||
|
||||
# Determine clang-tidy mode based on actual files that will be checked
|
||||
if run_clang_tidy:
|
||||
is_full_scan = _is_clang_tidy_full_scan()
|
||||
# Full scan needed if: hash changed OR core files changed
|
||||
is_full_scan = _is_clang_tidy_full_scan() or is_core_change
|
||||
|
||||
if is_full_scan:
|
||||
# Full scan checks all files - always use split mode for efficiency
|
||||
@@ -630,6 +684,9 @@ def main() -> None:
|
||||
files_to_check_count = 0
|
||||
|
||||
# Build output
|
||||
# Determine which C++ unit tests to run
|
||||
cpp_run_all, cpp_components = determine_cpp_unit_tests(args.branch)
|
||||
|
||||
output: dict[str, Any] = {
|
||||
"integration_tests": run_integration,
|
||||
"clang_tidy": run_clang_tidy,
|
||||
@@ -638,13 +695,15 @@ def main() -> None:
|
||||
"python_linters": run_python_linters,
|
||||
"changed_components": changed_components,
|
||||
"changed_components_with_tests": changed_components_with_tests,
|
||||
"directly_changed_components_with_tests": directly_changed_with_tests,
|
||||
"directly_changed_components_with_tests": list(directly_changed_with_tests),
|
||||
"dependency_only_components_with_tests": dependency_only_components,
|
||||
"component_test_count": len(changed_components_with_tests),
|
||||
"directly_changed_count": len(directly_changed_with_tests),
|
||||
"dependency_only_count": len(dependency_only_components),
|
||||
"changed_cpp_file_count": changed_cpp_file_count,
|
||||
"memory_impact": memory_impact,
|
||||
"cpp_unit_tests_run_all": cpp_run_all,
|
||||
"cpp_unit_tests_components": cpp_components,
|
||||
}
|
||||
|
||||
# Output as JSON
|
||||
|
||||
@@ -2,19 +2,14 @@
|
||||
|
||||
import json
|
||||
|
||||
from helpers import git_ls_files
|
||||
from helpers import get_all_component_files, get_components_with_dependencies
|
||||
|
||||
from esphome.automation import ACTION_REGISTRY, CONDITION_REGISTRY
|
||||
from esphome.pins import PIN_SCHEMA_REGISTRY
|
||||
|
||||
list_components = __import__("list-components")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
files = git_ls_files()
|
||||
files = filter(list_components.filter_component_files, files)
|
||||
|
||||
components = list_components.get_components(files, True)
|
||||
files = get_all_component_files()
|
||||
components = get_components_with_dependencies(files, True)
|
||||
|
||||
dump = {
|
||||
"actions": sorted(list(ACTION_REGISTRY.keys())),
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from functools import cache
|
||||
import json
|
||||
import os
|
||||
@@ -7,6 +8,7 @@ import os.path
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
@@ -23,12 +25,21 @@ CPP_FILE_EXTENSIONS = (".cpp", ".h", ".hpp", ".cc", ".cxx", ".c", ".tcc")
|
||||
# Python file extensions
|
||||
PYTHON_FILE_EXTENSIONS = (".py", ".pyi")
|
||||
|
||||
# Combined C++ and Python file extensions for convenience
|
||||
CPP_AND_PYTHON_FILE_EXTENSIONS = (*CPP_FILE_EXTENSIONS, *PYTHON_FILE_EXTENSIONS)
|
||||
|
||||
# YAML file extensions
|
||||
YAML_FILE_EXTENSIONS = (".yaml", ".yml")
|
||||
|
||||
# Component path prefix
|
||||
ESPHOME_COMPONENTS_PATH = "esphome/components/"
|
||||
|
||||
# Test components path prefix
|
||||
ESPHOME_TESTS_COMPONENTS_PATH = "tests/components/"
|
||||
|
||||
# Tuple of component and test paths for efficient startswith checks
|
||||
COMPONENT_AND_TESTS_PATHS = (ESPHOME_COMPONENTS_PATH, ESPHOME_TESTS_COMPONENTS_PATH)
|
||||
|
||||
# Base bus components - these ARE the bus implementations and should not
|
||||
# be flagged as needing migration since they are the platform/base components
|
||||
BASE_BUS_COMPONENTS = {
|
||||
@@ -304,7 +315,10 @@ def get_changed_components() -> list[str] | None:
|
||||
for f in changed
|
||||
)
|
||||
if core_cpp_changed:
|
||||
print("Core C++/header files changed - will run full clang-tidy scan")
|
||||
print(
|
||||
"Core C++/header files changed - will run full clang-tidy scan",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return None
|
||||
|
||||
# Use list-components.py to get changed components
|
||||
@@ -318,7 +332,10 @@ def get_changed_components() -> list[str] | None:
|
||||
return parse_list_components_output(result.stdout)
|
||||
except subprocess.CalledProcessError:
|
||||
# If the script fails, fall back to full scan
|
||||
print("Could not determine changed components - will run full clang-tidy scan")
|
||||
print(
|
||||
"Could not determine changed components - will run full clang-tidy scan",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
@@ -370,14 +387,14 @@ def _filter_changed_ci(files: list[str]) -> list[str]:
|
||||
if f in changed and not f.startswith(ESPHOME_COMPONENTS_PATH)
|
||||
]
|
||||
if not files:
|
||||
print("No files changed")
|
||||
print("No files changed", file=sys.stderr)
|
||||
return files
|
||||
|
||||
# Scenario 3: Specific components changed
|
||||
# Action: Check ALL files in each changed component
|
||||
# Convert component list to set for O(1) lookups
|
||||
component_set = set(components)
|
||||
print(f"Changed components: {', '.join(sorted(components))}")
|
||||
print(f"Changed components: {', '.join(sorted(components))}", file=sys.stderr)
|
||||
|
||||
# The 'files' parameter contains ALL files in the codebase that clang-tidy would check.
|
||||
# We filter this down to only files in the changed components.
|
||||
@@ -648,3 +665,313 @@ def get_components_from_integration_fixtures() -> set[str]:
|
||||
components.add(item["platform"])
|
||||
|
||||
return components
|
||||
|
||||
|
||||
def filter_component_and_test_files(file_path: str) -> bool:
|
||||
"""Check if a file path is a component or test file.
|
||||
|
||||
Args:
|
||||
file_path: Path to check
|
||||
|
||||
Returns:
|
||||
True if the file is in a component or test directory
|
||||
"""
|
||||
return file_path.startswith(COMPONENT_AND_TESTS_PATHS) or (
|
||||
file_path.startswith(ESPHOME_TESTS_COMPONENTS_PATH)
|
||||
and file_path.endswith(YAML_FILE_EXTENSIONS)
|
||||
)
|
||||
|
||||
|
||||
def filter_component_and_test_cpp_files(file_path: str) -> bool:
|
||||
"""Check if a file is a C++ source file in component or test directories.
|
||||
|
||||
Args:
|
||||
file_path: Path to check
|
||||
|
||||
Returns:
|
||||
True if the file is a C++ source/header file in component or test directories
|
||||
"""
|
||||
return file_path.endswith(CPP_FILE_EXTENSIONS) and file_path.startswith(
|
||||
COMPONENT_AND_TESTS_PATHS
|
||||
)
|
||||
|
||||
|
||||
def extract_component_names_from_files(files: list[str]) -> list[str]:
|
||||
"""Extract unique component names from a list of file paths.
|
||||
|
||||
Args:
|
||||
files: List of file paths
|
||||
|
||||
Returns:
|
||||
List of unique component names (preserves order)
|
||||
"""
|
||||
return list(
|
||||
dict.fromkeys(comp for file in files if (comp := get_component_from_path(file)))
|
||||
)
|
||||
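# Illustrative sketch only (not part of the script). Assuming
# get_component_from_path maps "esphome/components/<name>/..." to "<name>",
# duplicates are dropped while first-seen order is preserved:
#   extract_component_names_from_files(
#       ["esphome/components/uart/uart.cpp", "esphome/components/uart/uart.h"]
#   )  # -> ["uart"]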
|
||||
|
||||
def add_item_to_components_graph(
|
||||
components_graph: dict[str, list[str]], parent: str, child: str
|
||||
) -> None:
|
||||
"""Add a dependency relationship to the components graph.
|
||||
|
||||
Args:
|
||||
components_graph: Graph mapping parent components to their children
|
||||
parent: Parent component name
|
||||
child: Child component name (dependent)
|
||||
"""
|
||||
if not parent.startswith("__") and parent != child:
|
||||
if parent not in components_graph:
|
||||
components_graph[parent] = []
|
||||
if child not in components_graph[parent]:
|
||||
components_graph[parent].append(child)
|
||||
|
||||
|
||||
def resolve_auto_load(
|
||||
auto_load: list[str] | Callable[[], list[str]] | Callable[[dict | None], list[str]],
|
||||
config: dict | None = None,
|
||||
) -> list[str]:
|
||||
"""Resolve AUTO_LOAD to a list, handling callables with or without config parameter.
|
||||
|
||||
Args:
|
||||
auto_load: The AUTO_LOAD value (list or callable)
|
||||
config: Optional config to pass to callable AUTO_LOAD functions
|
||||
|
||||
Returns:
|
||||
List of component names to auto-load
|
||||
"""
|
||||
if not callable(auto_load):
|
||||
return auto_load
|
||||
|
||||
import inspect
|
||||
|
||||
if inspect.signature(auto_load).parameters:
|
||||
return auto_load(config)
|
||||
return auto_load()
|
||||
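# Illustrative sketch only (not part of the script): AUTO_LOAD in a component
# manifest may be a plain list, a zero-argument callable, or a callable that
# takes the validated config; resolve_auto_load() normalizes all three forms:
#   resolve_auto_load(["sensor"])                     # -> ["sensor"]
#   resolve_auto_load(lambda: ["sensor"])             # -> ["sensor"]
#   resolve_auto_load(lambda config: ["sensor"], {})  # -> ["sensor"]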
|
||||
|
||||
def create_components_graph() -> dict[str, list[str]]:
|
||||
"""Create a graph of component dependencies.
|
||||
|
||||
Returns:
|
||||
Dictionary mapping parent components to their children (dependencies)
|
||||
"""
|
||||
from pathlib import Path
|
||||
|
||||
from esphome import const
|
||||
from esphome.core import CORE
|
||||
from esphome.loader import ComponentManifest, get_component, get_platform
|
||||
|
||||
# The root directory of the repo
|
||||
root = Path(__file__).parent.parent
|
||||
components_dir = root / ESPHOME_COMPONENTS_PATH
|
||||
# Fake some directory so that get_component works
|
||||
CORE.config_path = root
|
||||
# Various configuration to capture different outcomes used by `AUTO_LOAD` function.
|
||||
KEY_CORE = const.KEY_CORE
|
||||
KEY_TARGET_FRAMEWORK = const.KEY_TARGET_FRAMEWORK
|
||||
KEY_TARGET_PLATFORM = const.KEY_TARGET_PLATFORM
|
||||
PLATFORM_ESP32 = const.PLATFORM_ESP32
|
||||
PLATFORM_ESP8266 = const.PLATFORM_ESP8266
|
||||
|
||||
TARGET_CONFIGURATIONS = [
|
||||
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None},
|
||||
{KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None},
|
||||
{KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None},
|
||||
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32},
|
||||
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP8266},
|
||||
]
|
||||
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
|
||||
|
||||
components_graph = {}
|
||||
platforms = []
|
||||
components: list[tuple[ComponentManifest, str, Path]] = []
|
||||
|
||||
for path in components_dir.iterdir():
|
||||
if not path.is_dir():
|
||||
continue
|
||||
if not (path / "__init__.py").is_file():
|
||||
continue
|
||||
name = path.name
|
||||
comp = get_component(name)
|
||||
if comp is None:
|
||||
raise RuntimeError(
|
||||
f"Cannot find component {name}. Make sure current path is pip installed ESPHome"
|
||||
)
|
||||
|
||||
components.append((comp, name, path))
|
||||
if comp.is_platform_component:
|
||||
platforms.append(name)
|
||||
|
||||
platforms = set(platforms)
|
||||
|
||||
for comp, name, path in components:
|
||||
for dependency in comp.dependencies:
|
||||
add_item_to_components_graph(
|
||||
components_graph, dependency.split(".")[0], name
|
||||
)
|
||||
|
||||
for target_config in TARGET_CONFIGURATIONS:
|
||||
CORE.data[KEY_CORE] = target_config
|
||||
for item in resolve_auto_load(comp.auto_load, config=None):
|
||||
add_item_to_components_graph(components_graph, item, name)
|
||||
# restore config
|
||||
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
|
||||
|
||||
for platform_path in path.iterdir():
|
||||
platform_name = platform_path.stem
|
||||
if platform_name == name or platform_name not in platforms:
|
||||
continue
|
||||
platform = get_platform(platform_name, name)
|
||||
if platform is None:
|
||||
continue
|
||||
|
||||
add_item_to_components_graph(components_graph, platform_name, name)
|
||||
|
||||
for dependency in platform.dependencies:
|
||||
add_item_to_components_graph(
|
||||
components_graph, dependency.split(".")[0], name
|
||||
)
|
||||
|
||||
for target_config in TARGET_CONFIGURATIONS:
|
||||
CORE.data[KEY_CORE] = target_config
|
||||
for item in resolve_auto_load(platform.auto_load, config={}):
|
||||
add_item_to_components_graph(components_graph, item, name)
|
||||
# restore config
|
||||
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
|
||||
|
||||
return components_graph
|
||||
|
||||
|
||||
def find_children_of_component(
|
||||
components_graph: dict[str, list[str]], component_name: str, depth: int = 0
|
||||
) -> list[str]:
|
||||
"""Find all components that depend on the given component (recursively).
|
||||
|
||||
Args:
|
||||
components_graph: Graph mapping parent components to their children
|
||||
component_name: Component name to find children for
|
||||
depth: Current recursion depth (max 10)
|
||||
|
||||
Returns:
|
||||
List of all dependent component names (may contain duplicates removed at end)
|
||||
"""
|
||||
if component_name not in components_graph:
|
||||
return []
|
||||
|
||||
children = []
|
||||
|
||||
for child in components_graph[component_name]:
|
||||
children.append(child)
|
||||
if depth < 10:
|
||||
children.extend(
|
||||
find_children_of_component(components_graph, child, depth + 1)
|
||||
)
|
||||
# Remove duplicate values
|
||||
return list(set(children))
|
||||
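# Illustrative sketch only (hypothetical graph, no real component names implied):
#   graph = {"i2c": ["bme280"], "bme280": ["bme280_helper"]}
#   find_children_of_component(graph, "i2c")
#   # -> ["bme280", "bme280_helper"]  (all transitive dependents, order not guaranteed)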
|
||||
|
||||
def get_components_with_dependencies(
|
||||
files: list[str], get_dependencies: bool = False
|
||||
) -> list[str]:
|
||||
"""Get component names from files, optionally including their dependencies.
|
||||
|
||||
Args:
|
||||
files: List of file paths
|
||||
get_dependencies: If True, include all dependent components
|
||||
|
||||
Returns:
|
||||
Sorted list of component names
|
||||
"""
|
||||
components = extract_component_names_from_files(files)
|
||||
|
||||
if get_dependencies:
|
||||
components_graph = create_components_graph()
|
||||
|
||||
all_components = components.copy()
|
||||
for c in components:
|
||||
all_components.extend(find_children_of_component(components_graph, c))
|
||||
# Remove duplicate values
|
||||
all_changed_components = list(set(all_components))
|
||||
|
||||
return sorted(all_changed_components)
|
||||
|
||||
return sorted(components)
|
||||
|
||||
|
||||
def get_all_component_files() -> list[str]:
|
||||
"""Get all component and test files from git.
|
||||
|
||||
Returns:
|
||||
List of all component and test file paths
|
||||
"""
|
||||
files = git_ls_files()
|
||||
return list(filter(filter_component_and_test_files, files))
|
||||
|
||||
|
||||
def get_all_components() -> list[str]:
|
||||
"""Get all component names.
|
||||
|
||||
This function uses git to find all component files and extracts the component names.
|
||||
It returns the same list as calling list-components.py without arguments.
|
||||
|
||||
Returns:
|
||||
List of all component names
|
||||
"""
|
||||
return get_components_with_dependencies(get_all_component_files(), False)
|
||||
|
||||
|
||||
def core_changed(files: list[str]) -> bool:
|
||||
"""Check if any core C++ or Python files have changed.
|
||||
|
||||
Args:
|
||||
files: List of file paths to check
|
||||
|
||||
Returns:
|
||||
True if any core C++ or Python files have changed
|
||||
"""
|
||||
return any(
|
||||
f.startswith("esphome/core/") and f.endswith(CPP_AND_PYTHON_FILE_EXTENSIONS)
|
||||
for f in files
|
||||
)
|
||||
|
||||
|
||||
def get_cpp_changed_components(files: list[str]) -> list[str]:
|
||||
"""Get components that have changed C++ files or tests.
|
||||
|
||||
This function analyzes a list of changed files and determines which components
|
||||
are affected. It handles two scenarios:
|
||||
|
||||
1. Test files changed (tests/components/<component>/*.cpp):
|
||||
- Adds the component to the affected list
|
||||
- Only that component needs to be tested
|
||||
|
||||
2. Component C++ files changed (esphome/components/<component>/*):
|
||||
- Adds the component to the affected list
|
||||
- Also adds all components that depend on this component (recursively)
|
||||
- This ensures that changes propagate to dependent components
|
||||
|
||||
Args:
|
||||
files: List of file paths to analyze (should be C++ files)
|
||||
|
||||
Returns:
|
||||
Sorted list of component names that need C++ unit tests run
|
||||
"""
|
||||
components_graph = create_components_graph()
|
||||
affected: set[str] = set()
|
||||
for file in files:
|
||||
if not file.endswith(CPP_FILE_EXTENSIONS):
|
||||
continue
|
||||
if file.startswith(ESPHOME_TESTS_COMPONENTS_PATH):
|
||||
parts = file.split("/")
|
||||
if len(parts) >= 4:
|
||||
component_dir = Path(ESPHOME_TESTS_COMPONENTS_PATH) / parts[2]
|
||||
if component_dir.is_dir():
|
||||
affected.add(parts[2])
|
||||
elif file.startswith(ESPHOME_COMPONENTS_PATH):
|
||||
parts = file.split("/")
|
||||
if len(parts) >= 4:
|
||||
component = parts[2]
|
||||
affected.update(find_children_of_component(components_graph, component))
|
||||
affected.add(component)
|
||||
return sorted(affected)
|
||||
|
||||
@@ -1,180 +1,14 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
from collections.abc import Callable
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
from helpers import changed_files, get_component_from_path, git_ls_files
|
||||
|
||||
from esphome.const import (
|
||||
KEY_CORE,
|
||||
KEY_TARGET_FRAMEWORK,
|
||||
KEY_TARGET_PLATFORM,
|
||||
PLATFORM_ESP32,
|
||||
PLATFORM_ESP8266,
|
||||
from helpers import (
|
||||
changed_files,
|
||||
filter_component_and_test_cpp_files,
|
||||
filter_component_and_test_files,
|
||||
get_all_component_files,
|
||||
get_components_with_dependencies,
|
||||
get_cpp_changed_components,
|
||||
)
|
||||
from esphome.core import CORE
|
||||
from esphome.loader import ComponentManifest, get_component, get_platform
|
||||
|
||||
|
||||
def filter_component_files(str):
|
||||
return str.startswith("esphome/components/") | str.startswith("tests/components/")
|
||||
|
||||
|
||||
def get_all_component_files() -> list[str]:
|
||||
"""Get all component files from git."""
|
||||
files = git_ls_files()
|
||||
return list(filter(filter_component_files, files))
|
||||
|
||||
|
||||
def extract_component_names_array_from_files_array(files):
|
||||
components = []
|
||||
for file in files:
|
||||
component_name = get_component_from_path(file)
|
||||
if component_name and component_name not in components:
|
||||
components.append(component_name)
|
||||
return components
|
||||
|
||||
|
||||
def add_item_to_components_graph(components_graph, parent, child):
|
||||
if not parent.startswith("__") and parent != child:
|
||||
if parent not in components_graph:
|
||||
components_graph[parent] = []
|
||||
if child not in components_graph[parent]:
|
||||
components_graph[parent].append(child)
|
||||
|
||||
|
||||
def resolve_auto_load(
|
||||
auto_load: list[str] | Callable[[], list[str]] | Callable[[dict | None], list[str]],
|
||||
config: dict | None = None,
|
||||
) -> list[str]:
|
||||
"""Resolve AUTO_LOAD to a list, handling callables with or without config parameter.
|
||||
|
||||
Args:
|
||||
auto_load: The AUTO_LOAD value (list or callable)
|
||||
config: Optional config to pass to callable AUTO_LOAD functions
|
||||
|
||||
Returns:
|
||||
List of component names to auto-load
|
||||
"""
|
||||
if not callable(auto_load):
|
||||
return auto_load
|
||||
|
||||
import inspect
|
||||
|
||||
if inspect.signature(auto_load).parameters:
|
||||
return auto_load(config)
|
||||
return auto_load()
|
||||
|
||||
|
||||
def create_components_graph():
|
||||
# The root directory of the repo
|
||||
root = Path(__file__).parent.parent
|
||||
components_dir = root / "esphome" / "components"
|
||||
# Fake some directory so that get_component works
|
||||
CORE.config_path = root
|
||||
# Various configuration to capture different outcomes used by `AUTO_LOAD` function.
|
||||
TARGET_CONFIGURATIONS = [
|
||||
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: None},
|
||||
{KEY_TARGET_FRAMEWORK: "arduino", KEY_TARGET_PLATFORM: None},
|
||||
{KEY_TARGET_FRAMEWORK: "esp-idf", KEY_TARGET_PLATFORM: None},
|
||||
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP32},
|
||||
{KEY_TARGET_FRAMEWORK: None, KEY_TARGET_PLATFORM: PLATFORM_ESP8266},
|
||||
]
|
||||
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
|
||||
|
||||
components_graph = {}
|
||||
platforms = []
|
||||
components: list[tuple[ComponentManifest, str, Path]] = []
|
||||
|
||||
for path in components_dir.iterdir():
|
||||
if not path.is_dir():
|
||||
continue
|
||||
if not (path / "__init__.py").is_file():
|
||||
continue
|
||||
name = path.name
|
||||
comp = get_component(name)
|
||||
if comp is None:
|
||||
print(
|
||||
f"Cannot find component {name}. Make sure current path is pip installed ESPHome"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
components.append((comp, name, path))
|
||||
if comp.is_platform_component:
|
||||
platforms.append(name)
|
||||
|
||||
platforms = set(platforms)
|
||||
|
||||
for comp, name, path in components:
|
||||
for dependency in comp.dependencies:
|
||||
add_item_to_components_graph(
|
||||
components_graph, dependency.split(".")[0], name
|
||||
)
|
||||
|
||||
for target_config in TARGET_CONFIGURATIONS:
|
||||
CORE.data[KEY_CORE] = target_config
|
||||
for item in resolve_auto_load(comp.auto_load, config=None):
|
||||
add_item_to_components_graph(components_graph, item, name)
|
||||
# restore config
|
||||
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
|
||||
|
||||
for platform_path in path.iterdir():
|
||||
platform_name = platform_path.stem
|
||||
if platform_name == name or platform_name not in platforms:
|
||||
continue
|
||||
platform = get_platform(platform_name, name)
|
||||
if platform is None:
|
||||
continue
|
||||
|
||||
add_item_to_components_graph(components_graph, platform_name, name)
|
||||
|
||||
for dependency in platform.dependencies:
|
||||
add_item_to_components_graph(
|
||||
components_graph, dependency.split(".")[0], name
|
||||
)
|
||||
|
||||
for target_config in TARGET_CONFIGURATIONS:
|
||||
CORE.data[KEY_CORE] = target_config
|
||||
for item in resolve_auto_load(platform.auto_load, config={}):
|
||||
add_item_to_components_graph(components_graph, item, name)
|
||||
# restore config
|
||||
CORE.data[KEY_CORE] = TARGET_CONFIGURATIONS[0]
|
||||
|
||||
return components_graph
|
||||
|
||||
|
||||
def find_children_of_component(components_graph, component_name, depth=0):
|
||||
if component_name not in components_graph:
|
||||
return []
|
||||
|
||||
children = []
|
||||
|
||||
for child in components_graph[component_name]:
|
||||
children.append(child)
|
||||
if depth < 10:
|
||||
children.extend(
|
||||
find_children_of_component(components_graph, child, depth + 1)
|
||||
)
|
||||
# Remove duplicate values
|
||||
return list(set(children))
|
||||
|
||||
|
||||
def get_components(files: list[str], get_dependencies: bool = False):
|
||||
components = extract_component_names_array_from_files_array(files)
|
||||
|
||||
if get_dependencies:
|
||||
components_graph = create_components_graph()
|
||||
|
||||
all_components = components.copy()
|
||||
for c in components:
|
||||
all_components.extend(find_children_of_component(components_graph, c))
|
||||
# Remove duplicate values
|
||||
all_changed_components = list(set(all_components))
|
||||
|
||||
return sorted(all_changed_components)
|
||||
|
||||
return sorted(components)
|
||||
|
||||
|
||||
def main():
|
||||
@@ -201,16 +35,29 @@ def main():
|
||||
parser.add_argument(
|
||||
"-b", "--branch", help="Branch to compare changed files against"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--cpp-changed",
|
||||
action="store_true",
|
||||
help="List components with changed C++ files",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.branch and not (
|
||||
args.changed or args.changed_direct or args.changed_with_deps
|
||||
args.changed
|
||||
or args.changed_direct
|
||||
or args.changed_with_deps
|
||||
or args.cpp_changed
|
||||
):
|
||||
parser.error(
|
||||
"--branch requires --changed, --changed-direct, or --changed-with-deps"
|
||||
"--branch requires --changed, --changed-direct, --changed-with-deps, or --cpp-changed"
|
||||
)
|
||||
|
||||
if args.changed or args.changed_direct or args.changed_with_deps:
|
||||
if (
|
||||
args.changed
|
||||
or args.changed_direct
|
||||
or args.changed_with_deps
|
||||
or args.cpp_changed
|
||||
):
|
||||
# When --changed* is passed, only get the changed files
|
||||
changed = changed_files(args.branch)
|
||||
|
||||
@@ -230,6 +77,11 @@ def main():
|
||||
# - --changed-with-deps: Used by CI test determination (script/determine-jobs.py)
|
||||
# Returns: Components with code changes + their dependencies (not infrastructure)
|
||||
# Reason: CI needs to test changed components and their dependents
|
||||
#
|
||||
# - --cpp-changed: Used by CI to determine if any C++ files changed (script/determine-jobs.py)
|
||||
# Returns: Only components with changed C++ files
|
||||
# Reason: Only components with C++ changes need C++ testing
|
||||
|
||||
base_test_changed = any(
|
||||
"tests/test_build_components" in file for file in changed
|
||||
)
|
||||
@@ -242,7 +94,7 @@ def main():
|
||||
# Only look at changed component files (ignore infrastructure changes)
|
||||
# For --changed-direct: only actual component code changes matter (for isolation)
|
||||
# For --changed-with-deps: only actual component code changes matter (for testing)
|
||||
files = [f for f in changed if filter_component_files(f)]
|
||||
files = [f for f in changed if filter_component_and_test_files(f)]
|
||||
else:
|
||||
# Get all component files
|
||||
files = get_all_component_files()
|
||||
@@ -251,8 +103,8 @@ def main():
|
||||
# Return JSON with both directly changed and all changed components
|
||||
import json
|
||||
|
||||
directly_changed = get_components(files, False)
|
||||
all_changed = get_components(files, True)
|
||||
directly_changed = get_components_with_dependencies(files, False)
|
||||
all_changed = get_components_with_dependencies(files, True)
|
||||
output = {
|
||||
"directly_changed": directly_changed,
|
||||
"all_changed": all_changed,
|
||||
@@ -260,11 +112,16 @@ def main():
|
||||
print(json.dumps(output))
|
||||
elif args.changed_direct:
|
||||
# Return only directly changed components (without dependencies)
|
||||
for c in get_components(files, False):
|
||||
for c in get_components_with_dependencies(files, False):
|
||||
print(c)
|
||||
elif args.cpp_changed:
|
||||
# Only look at changed cpp files
|
||||
files = list(filter(filter_component_and_test_cpp_files, changed))
|
||||
for c in get_cpp_changed_components(files):
|
||||
print(c)
|
||||
else:
|
||||
# Return all changed components (with dependencies) - default behavior
|
||||
for c in get_components(files, args.changed):
|
||||
for c in get_components_with_dependencies(files, args.changed):
|
||||
print(c)
|
||||
|
||||
|
||||
|
||||
@@ -966,11 +966,33 @@ def test_components(
|
||||
# Find all component tests
|
||||
all_tests = {}
|
||||
for pattern in component_patterns:
|
||||
# Skip empty patterns (happens when components list is empty string)
|
||||
if not pattern:
|
||||
continue
|
||||
all_tests.update(find_component_tests(tests_dir, pattern, base_only))
|
||||
|
||||
# If no components found, build a reference configuration for baseline comparison
|
||||
# Create a synthetic "empty" component test that will build just the base config
|
||||
if not all_tests:
|
||||
print(f"No components found matching: {component_patterns}")
|
||||
return 1
|
||||
print(
|
||||
"Building reference configuration with no components for baseline comparison..."
|
||||
)
|
||||
|
||||
# Create empty test files for each platform (or filtered platform)
|
||||
reference_tests: list[Path] = []
|
||||
for platform_name, base_file in platform_bases.items():
|
||||
if platform_filter and not platform_name.startswith(platform_filter):
|
||||
continue
|
||||
# Create an empty test file named to match the platform
|
||||
empty_test_file = build_dir / f"reference.{platform_name}.yaml"
|
||||
empty_test_file.write_text(
|
||||
"# Empty component test for baseline reference\n"
|
||||
)
|
||||
reference_tests.append(empty_test_file)
|
||||
|
||||
# Add to all_tests dict with component name "reference"
|
||||
all_tests["reference"] = reference_tests
|
||||
|
||||
print(f"Found {len(all_tests)} components to test")
|
||||
|
||||
|
||||
tests/components/.gitignore (new file, 5 lines)
@@ -0,0 +1,5 @@
|
||||
# Gitignore settings for ESPHome
|
||||
# This is an example and may include too much for your use-case.
|
||||
# You can modify this file to suit your needs.
|
||||
/.esphome/
|
||||
/secrets.yaml
|
||||
tests/components/README.md (new file, 32 lines)
@@ -0,0 +1,32 @@
# How to write C++ ESPHome unit tests

1. Locate the folder with your component, or create a new one with the same name as the component.
2. Write the tests. You can add as many `.cpp` and `.h` files as you need to organize your tests.

**IMPORTANT**: Wrap all your testing code in a unique namespace to avoid linker collisions when compiling
testing binaries that combine many components. By convention, this unique namespace is `esphome::component::testing`
(where "component" is the component under test), for example `esphome::uart::testing`.
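For illustration only, a minimal test file following this convention could look like the sketch below (the test name and assertion are invented, not taken from any existing component):

```cpp
#include <gtest/gtest.h>

// Everything lives in the per-component testing namespace so that several
// components can be linked into one test binary without symbol collisions.
namespace esphome::uart::testing {

TEST(NamespaceConventionExample, BasicAssertion) {
  // Purely illustrative assertion.
  EXPECT_EQ(2 + 2, 4);
}

}  // namespace esphome::uart::testing
```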

## Running component unit tests

From the repository root:

```bash
./script/cpp_unit_test.py component1 component2 ...
```

The above will compile the listed components together with their tests and run them.

To run all tests, invoke `cpp_unit_test.py` with the special `--all` flag:

```bash
./script/cpp_unit_test.py --all
```

To run a specific test suite, provide a Google Test filter:

```bash
GTEST_FILTER='UART*' ./script/cpp_unit_test.py uart modbus
```

The process returns `0` for success and nonzero for failure; on failure, the errors are printed to the console.
@@ -37,3 +37,102 @@ binary_sensor:
|
||||
format: "New state is %s"
|
||||
args: ['x.has_value() ? ONOFF(x) : "Unknown"']
|
||||
- binary_sensor.invalidate_state: some_binary_sensor
|
||||
|
||||
# Test autorepeat with default configuration (no timings)
|
||||
- platform: template
|
||||
id: autorepeat_default
|
||||
name: "Autorepeat Default"
|
||||
filters:
|
||||
- autorepeat:
|
||||
|
||||
# Test autorepeat with single timing entry
|
||||
- platform: template
|
||||
id: autorepeat_single
|
||||
name: "Autorepeat Single"
|
||||
filters:
|
||||
- autorepeat:
|
||||
- delay: 2s
|
||||
time_off: 200ms
|
||||
time_on: 800ms
|
||||
|
||||
# Test autorepeat with three timing entries
|
||||
- platform: template
|
||||
id: autorepeat_multiple
|
||||
name: "Autorepeat Multiple"
|
||||
filters:
|
||||
- autorepeat:
|
||||
- delay: 500ms
|
||||
time_off: 50ms
|
||||
time_on: 950ms
|
||||
- delay: 2s
|
||||
time_off: 100ms
|
||||
time_on: 900ms
|
||||
- delay: 10s
|
||||
time_off: 200ms
|
||||
time_on: 800ms
|
||||
|
||||
# Test on_multi_click with single click
|
||||
- platform: template
|
||||
id: multi_click_single
|
||||
name: "Multi Click Single"
|
||||
on_multi_click:
|
||||
- timing:
|
||||
- state: true
|
||||
min_length: 50ms
|
||||
max_length: 350ms
|
||||
then:
|
||||
- logger.log: "Single click detected"
|
||||
|
||||
# Test on_multi_click with double click
|
||||
- platform: template
|
||||
id: multi_click_double
|
||||
name: "Multi Click Double"
|
||||
on_multi_click:
|
||||
- timing:
|
||||
- state: true
|
||||
min_length: 50ms
|
||||
max_length: 350ms
|
||||
- state: false
|
||||
min_length: 50ms
|
||||
max_length: 350ms
|
||||
- state: true
|
||||
min_length: 50ms
|
||||
max_length: 350ms
|
||||
then:
|
||||
- logger.log: "Double click detected"
|
||||
|
||||
# Test on_multi_click with complex pattern (5 events)
|
||||
- platform: template
|
||||
id: multi_click_complex
|
||||
name: "Multi Click Complex"
|
||||
on_multi_click:
|
||||
- timing:
|
||||
- state: true
|
||||
min_length: 50ms
|
||||
max_length: 350ms
|
||||
- state: false
|
||||
min_length: 50ms
|
||||
max_length: 350ms
|
||||
- state: true
|
||||
min_length: 50ms
|
||||
max_length: 350ms
|
||||
- state: false
|
||||
min_length: 50ms
|
||||
max_length: 350ms
|
||||
- state: true
|
||||
min_length: 50ms
|
||||
then:
|
||||
- logger.log: "Complex pattern detected"
|
||||
|
||||
# Test on_multi_click with custom invalid_cooldown
|
||||
- platform: template
|
||||
id: multi_click_cooldown
|
||||
name: "Multi Click Cooldown"
|
||||
on_multi_click:
|
||||
- timing:
|
||||
- state: true
|
||||
min_length: 100ms
|
||||
max_length: 500ms
|
||||
invalid_cooldown: 2s
|
||||
then:
|
||||
- logger.log: "Click with custom cooldown"
|
||||
|
||||
@@ -12,3 +12,20 @@ switch:
|
||||
- platform: gpio
|
||||
pin: ${switch_pin}
|
||||
id: gpio_switch
|
||||
|
||||
- platform: gpio
|
||||
pin: ${switch_pin_2}
|
||||
id: gpio_switch_interlock_1
|
||||
interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3]
|
||||
interlock_wait_time: 100ms
|
||||
|
||||
- platform: gpio
|
||||
pin: ${switch_pin_3}
|
||||
id: gpio_switch_interlock_2
|
||||
interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3]
|
||||
|
||||
- platform: gpio
|
||||
pin: ${switch_pin_4}
|
||||
id: gpio_switch_interlock_3
|
||||
interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2]
|
||||
interlock_wait_time: 50ms
|
||||
|
||||
@@ -2,5 +2,8 @@ substitutions:
|
||||
binary_sensor_pin: GPIO2
|
||||
output_pin: GPIO3
|
||||
switch_pin: GPIO4
|
||||
switch_pin_2: GPIO5
|
||||
switch_pin_3: GPIO6
|
||||
switch_pin_4: GPIO7
|
||||
|
||||
<<: !include common.yaml
|
||||
|
||||
@@ -2,5 +2,8 @@ substitutions:
|
||||
binary_sensor_pin: GPIO12
|
||||
output_pin: GPIO13
|
||||
switch_pin: GPIO14
|
||||
switch_pin_2: GPIO15
|
||||
switch_pin_3: GPIO16
|
||||
switch_pin_4: GPIO17
|
||||
|
||||
<<: !include common.yaml
|
||||
|
||||
@@ -2,5 +2,8 @@ substitutions:
|
||||
binary_sensor_pin: GPIO0
|
||||
output_pin: GPIO2
|
||||
switch_pin: GPIO15
|
||||
switch_pin_2: GPIO12
|
||||
switch_pin_3: GPIO13
|
||||
switch_pin_4: GPIO14
|
||||
|
||||
<<: !include common.yaml
|
||||
|
||||
@@ -12,3 +12,20 @@ switch:
|
||||
- platform: gpio
|
||||
pin: P1.2
|
||||
id: gpio_switch
|
||||
|
||||
- platform: gpio
|
||||
pin: P1.3
|
||||
id: gpio_switch_interlock_1
|
||||
interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3]
|
||||
interlock_wait_time: 100ms
|
||||
|
||||
- platform: gpio
|
||||
pin: P1.4
|
||||
id: gpio_switch_interlock_2
|
||||
interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3]
|
||||
|
||||
- platform: gpio
|
||||
pin: P1.5
|
||||
id: gpio_switch_interlock_3
|
||||
interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2]
|
||||
interlock_wait_time: 50ms
|
||||
|
||||
@@ -12,3 +12,20 @@ switch:
|
||||
- platform: gpio
|
||||
pin: P1.2
|
||||
id: gpio_switch
|
||||
|
||||
- platform: gpio
|
||||
pin: P1.3
|
||||
id: gpio_switch_interlock_1
|
||||
interlock: [gpio_switch_interlock_2, gpio_switch_interlock_3]
|
||||
interlock_wait_time: 100ms
|
||||
|
||||
- platform: gpio
|
||||
pin: P1.4
|
||||
id: gpio_switch_interlock_2
|
||||
interlock: [gpio_switch_interlock_1, gpio_switch_interlock_3]
|
||||
|
||||
- platform: gpio
|
||||
pin: P1.5
|
||||
id: gpio_switch_interlock_3
|
||||
interlock: [gpio_switch_interlock_1, gpio_switch_interlock_2]
|
||||
interlock_wait_time: 50ms
|
||||
|
||||
@@ -2,5 +2,8 @@ substitutions:
|
||||
binary_sensor_pin: GPIO2
|
||||
output_pin: GPIO3
|
||||
switch_pin: GPIO4
|
||||
switch_pin_2: GPIO5
|
||||
switch_pin_3: GPIO6
|
||||
switch_pin_4: GPIO7
|
||||
|
||||
<<: !include common.yaml
|
||||
|
||||
@@ -17,6 +17,20 @@ esphome:
|
||||
relative_brightness: 5%
|
||||
brightness_limits:
|
||||
max_brightness: 90%
|
||||
- light.turn_on:
|
||||
id: test_addressable_transition
|
||||
brightness: 50%
|
||||
red: 100%
|
||||
green: 0%
|
||||
blue: 0%
|
||||
transition_length: 500ms
|
||||
- light.turn_on:
|
||||
id: test_addressable_transition
|
||||
brightness: 100%
|
||||
red: 0%
|
||||
green: 100%
|
||||
blue: 0%
|
||||
transition_length: 1s
|
||||
|
||||
light:
|
||||
- platform: binary
|
||||
@@ -123,3 +137,49 @@ light:
|
||||
red: 100%
|
||||
green: 50%
|
||||
blue: 50%
|
||||
# Test StrobeLightEffect with multiple colors
|
||||
- platform: monochromatic
|
||||
id: test_strobe_multiple
|
||||
name: Strobe Multiple Colors
|
||||
output: test_ledc_1
|
||||
effects:
|
||||
- strobe:
|
||||
name: Strobe Multi
|
||||
colors:
|
||||
- state: true
|
||||
brightness: 100%
|
||||
duration: 500ms
|
||||
- state: false
|
||||
duration: 250ms
|
||||
- state: true
|
||||
brightness: 50%
|
||||
duration: 500ms
|
||||
# Test StrobeLightEffect with transition
|
||||
- platform: rgb
|
||||
id: test_strobe_transition
|
||||
name: Strobe With Transition
|
||||
red: test_ledc_1
|
||||
green: test_ledc_2
|
||||
blue: test_ledc_3
|
||||
effects:
|
||||
- strobe:
|
||||
name: Strobe Transition
|
||||
colors:
|
||||
- state: true
|
||||
red: 100%
|
||||
green: 0%
|
||||
blue: 0%
|
||||
duration: 1s
|
||||
transition_length: 500ms
|
||||
- state: true
|
||||
red: 0%
|
||||
green: 100%
|
||||
blue: 0%
|
||||
duration: 1s
|
||||
transition_length: 500ms
|
||||
- platform: partition
|
||||
id: test_addressable_transition
|
||||
name: Addressable Transition Test
|
||||
default_transition_length: 1s
|
||||
segments:
|
||||
- single_light_id: test_rgb_light
|
||||
|
||||
tests/components/main.cpp (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
/*
|
||||
This special main.cpp replaces the default one.
|
||||
It will run all the Google Tests found in all compiled .cpp files and then exit with the result.
See README.md for more information.
|
||||
*/
|
||||
|
||||
// Auto generated code by esphome
|
||||
// ========== AUTO GENERATED INCLUDE BLOCK BEGIN ===========
|
||||
// ========== AUTO GENERATED INCLUDE BLOCK END ==========="
|
||||
|
||||
void original_setup() {
|
||||
// This function won't be run.
|
||||
|
||||
// ========== AUTO GENERATED CODE BEGIN ===========
|
||||
// =========== AUTO GENERATED CODE END ============
|
||||
}
|
||||
|
||||
void setup() {
|
||||
::testing::InitGoogleTest();
|
||||
int exit_code = RUN_ALL_TESTS();
|
||||
exit(exit_code);
|
||||
}
|
||||
|
||||
void loop() {}
|
||||
@@ -173,3 +173,66 @@ sensor:
|
||||
timeout: 1000ms
|
||||
value: [42.0]
|
||||
- multiply: 2.0
|
||||
|
||||
# CalibrateLinearFilter - piecewise linear calibration
|
||||
- platform: copy
|
||||
source_id: source_sensor
|
||||
name: "Calibrate Linear Two Points"
|
||||
filters:
|
||||
- calibrate_linear:
|
||||
- 0.0 -> 0.0
|
||||
- 100.0 -> 100.0
|
||||
|
||||
- platform: copy
|
||||
source_id: source_sensor
|
||||
name: "Calibrate Linear Multiple Segments"
|
||||
filters:
|
||||
- calibrate_linear:
|
||||
- 0.0 -> 0.0
|
||||
- 50.0 -> 55.0
|
||||
- 100.0 -> 102.5
|
||||
|
||||
- platform: copy
|
||||
source_id: source_sensor
|
||||
name: "Calibrate Linear Least Squares"
|
||||
filters:
|
||||
- calibrate_linear:
|
||||
method: least_squares
|
||||
datapoints:
|
||||
- 0.0 -> 0.0
|
||||
- 50.0 -> 55.0
|
||||
- 100.0 -> 102.5
|
||||
|
||||
# CalibratePolynomialFilter - polynomial calibration
|
||||
- platform: copy
|
||||
source_id: source_sensor
|
||||
name: "Calibrate Polynomial Degree 2"
|
||||
filters:
|
||||
- calibrate_polynomial:
|
||||
degree: 2
|
||||
datapoints:
|
||||
- 0.0 -> 0.0
|
||||
- 50.0 -> 55.0
|
||||
- 100.0 -> 102.5
|
||||
|
||||
- platform: copy
|
||||
source_id: source_sensor
|
||||
name: "Calibrate Polynomial Degree 3"
|
||||
filters:
|
||||
- calibrate_polynomial:
|
||||
degree: 3
|
||||
datapoints:
|
||||
- 0.0 -> 0.0
|
||||
- 25.0 -> 26.0
|
||||
- 50.0 -> 55.0
|
||||
- 100.0 -> 102.5
|
||||
|
||||
# OrFilter - filter branching
|
||||
- platform: copy
|
||||
source_id: source_sensor
|
||||
name: "Or Filter with Multiple Branches"
|
||||
filters:
|
||||
- or:
|
||||
- multiply: 2.0
|
||||
- offset: 10.0
|
||||
- lambda: return x * 3.0;
|
||||
|
||||
@@ -101,6 +101,9 @@ sensor:
|
||||
- filter_out: 10
|
||||
- filter_out: !lambda return NAN;
|
||||
- heartbeat: 5s
|
||||
- heartbeat:
|
||||
period: 5s
|
||||
optimistic: true
|
||||
- lambda: return x * (9.0/5.0) + 32.0;
|
||||
- max:
|
||||
window_size: 10
|
||||
|
||||
tests/components/text_sensor/common.yaml (new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
text_sensor:
|
||||
- platform: template
|
||||
name: "Test Substitute Single"
|
||||
id: test_substitute_single
|
||||
filters:
|
||||
- substitute:
|
||||
- ERROR -> Error
|
||||
|
||||
- platform: template
|
||||
name: "Test Substitute Multiple"
|
||||
id: test_substitute_multiple
|
||||
filters:
|
||||
- substitute:
|
||||
- ERROR -> Error
|
||||
- WARN -> Warning
|
||||
- INFO -> Information
|
||||
- DEBUG -> Debug
|
||||
|
||||
- platform: template
|
||||
name: "Test Substitute Chained"
|
||||
id: test_substitute_chained
|
||||
filters:
|
||||
- substitute:
|
||||
- foo -> bar
|
||||
- to_upper
|
||||
- substitute:
|
||||
- BAR -> baz
|
||||
|
||||
- platform: template
|
||||
name: "Test Map Single"
|
||||
id: test_map_single
|
||||
filters:
|
||||
- map:
|
||||
- ON -> Active
|
||||
|
||||
- platform: template
|
||||
name: "Test Map Multiple"
|
||||
id: test_map_multiple
|
||||
filters:
|
||||
- map:
|
||||
- ON -> Active
|
||||
- OFF -> Inactive
|
||||
- UNKNOWN -> Error
|
||||
- IDLE -> Standby
|
||||
|
||||
- platform: template
|
||||
name: "Test Map Passthrough"
|
||||
id: test_map_passthrough
|
||||
filters:
|
||||
- map:
|
||||
- Good -> Excellent
|
||||
- Bad -> Poor
|
||||
|
||||
- platform: template
|
||||
name: "Test All Filters"
|
||||
id: test_all_filters
|
||||
filters:
|
||||
- to_upper
|
||||
- to_lower
|
||||
- append: " suffix"
|
||||
- prepend: "prefix "
|
||||
- substitute:
|
||||
- prefix -> PREFIX
|
||||
- suffix -> SUFFIX
|
||||
- map:
|
||||
- PREFIX text SUFFIX -> mapped
|
||||
tests/components/text_sensor/test.esp8266-ard.yaml (new file, 1 line)
@@ -0,0 +1 @@
|
||||
<<: !include common.yaml
|
||||
tests/components/uart/common.h (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
#pragma once
|
||||
#include <vector>
|
||||
#include <cstdint>
|
||||
#include <cstring>
|
||||
#include <gmock/gmock.h>
|
||||
#include <gtest/gtest.h>
|
||||
#include "esphome/components/uart/uart_component.h"
|
||||
|
||||
namespace esphome::uart::testing {
|
||||
|
||||
using ::testing::_;
|
||||
using ::testing::Return;
|
||||
using ::testing::SaveArg;
|
||||
using ::testing::DoAll;
|
||||
using ::testing::Invoke;
|
||||
using ::testing::SetArgPointee;
|
||||
|
||||
// Derive a mock from UARTComponent to test the wrapper implementations.
|
||||
class MockUARTComponent : public UARTComponent {
|
||||
public:
|
||||
using UARTComponent::write_array;
|
||||
using UARTComponent::write_byte;
|
||||
|
||||
// NOTE: std::vector is used here for test convenience. For production code,
|
||||
// consider using StaticVector or FixedVector from esphome/core/helpers.h instead.
|
||||
std::vector<uint8_t> written_data;
|
||||
|
||||
void write_array(const uint8_t *data, size_t len) override { written_data.assign(data, data + len); }
|
||||
|
||||
MOCK_METHOD(bool, read_array, (uint8_t * data, size_t len), (override));
|
||||
MOCK_METHOD(bool, peek_byte, (uint8_t * data), (override));
|
||||
MOCK_METHOD(int, available, (), (override));
|
||||
MOCK_METHOD(void, flush, (), (override));
|
||||
MOCK_METHOD(void, check_logger_conflict, (), (override));
|
||||
};
|
||||
|
||||
} // namespace esphome::uart::testing
|
||||
tests/components/uart/uart_component.cpp (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
#include "common.h"
|
||||
|
||||
namespace esphome::uart::testing {
|
||||
|
||||
TEST(UARTComponentTest, SetGetBaudRate) {
|
||||
MockUARTComponent mock;
|
||||
mock.set_baud_rate(38400);
|
||||
EXPECT_EQ(mock.get_baud_rate(), 38400);
|
||||
}
|
||||
|
||||
TEST(UARTComponentTest, SetGetStopBits) {
|
||||
MockUARTComponent mock;
|
||||
mock.set_stop_bits(2);
|
||||
EXPECT_EQ(mock.get_stop_bits(), 2);
|
||||
}
|
||||
|
||||
TEST(UARTComponentTest, SetGetDataBits) {
|
||||
MockUARTComponent mock;
|
||||
mock.set_data_bits(7);
|
||||
EXPECT_EQ(mock.get_data_bits(), 7);
|
||||
}
|
||||
|
||||
TEST(UARTComponentTest, SetGetParity) {
|
||||
MockUARTComponent mock;
|
||||
mock.set_parity(UARTParityOptions::UART_CONFIG_PARITY_EVEN);
|
||||
EXPECT_EQ(mock.get_parity(), UARTParityOptions::UART_CONFIG_PARITY_EVEN);
|
||||
}
|
||||
|
||||
TEST(UARTComponentTest, SetGetRxBufferSize) {
|
||||
MockUARTComponent mock;
|
||||
mock.set_rx_buffer_size(128);
|
||||
EXPECT_EQ(mock.get_rx_buffer_size(), 128);
|
||||
}
|
||||
|
||||
TEST(UARTComponentTest, WriteArrayVector) {
|
||||
MockUARTComponent mock;
|
||||
std::vector<uint8_t> data = {10, 20, 30};
|
||||
mock.write_array(data);
|
||||
EXPECT_EQ(mock.written_data, data);
|
||||
}
|
||||
TEST(UARTComponentTest, WriteByte) {
|
||||
MockUARTComponent mock;
|
||||
uint8_t byte = 0x79;
|
||||
mock.write_byte(byte);
|
||||
EXPECT_EQ(mock.written_data.size(), 1);
|
||||
EXPECT_EQ(mock.written_data[0], byte);
|
||||
}
|
||||
|
||||
TEST(UARTComponentTest, WriteStr) {
|
||||
MockUARTComponent mock;
|
||||
const char *str = "Hello";
|
||||
std::vector<uint8_t> captured;
|
||||
mock.write_str(str);
|
||||
EXPECT_EQ(mock.written_data.size(), strlen(str));
|
||||
EXPECT_EQ(0, strncmp(str, (const char *) mock.written_data.data(), mock.written_data.size()));
|
||||
}
|
||||
|
||||
// Tests for wrapper methods forwarding to pure virtual read_array
|
||||
TEST(UARTComponentTest, ReadByteSuccess) {
|
||||
MockUARTComponent mock;
|
||||
uint8_t value = 0;
|
||||
EXPECT_CALL(mock, read_array(&value, 1)).WillOnce(Return(true));
|
||||
EXPECT_TRUE(mock.read_byte(&value));
|
||||
}
|
||||
|
||||
TEST(UARTComponentTest, ReadByteFailure) {
|
||||
MockUARTComponent mock;
|
||||
uint8_t value = 0xFF;
|
||||
EXPECT_CALL(mock, read_array(&value, 1)).WillOnce(Return(false));
|
||||
EXPECT_FALSE(mock.read_byte(&value));
|
||||
}
|
||||
|
||||
} // namespace esphome::uart::testing
|
||||
tests/components/uart/uart_device.cpp (new file, 108 lines)
@@ -0,0 +1,108 @@
|
||||
#include "common.h"
|
||||
#include "esphome/components/uart/uart.h"
|
||||
|
||||
namespace esphome::uart::testing {
|
||||
|
||||
TEST(UARTDeviceTest, ReadByteSuccess) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
uint8_t value = 0;
|
||||
EXPECT_CALL(mock, read_array(_, 1)).WillOnce(DoAll(SetArgPointee<0>(0x5A), Return(true)));
|
||||
bool result = dev.read_byte(&value);
|
||||
EXPECT_TRUE(result);
|
||||
EXPECT_EQ(value, 0x5A);
|
||||
}
|
||||
|
||||
TEST(UARTDeviceTest, ReadByteFailure) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
uint8_t value = 0xFF;
|
||||
EXPECT_CALL(mock, read_array(_, 1)).WillOnce(Return(false));
|
||||
bool result = dev.read_byte(&value);
|
||||
EXPECT_FALSE(result);
|
||||
}
|
||||
|
||||
TEST(UARTDeviceTest, PeekByteSuccess) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
uint8_t value = 0;
|
||||
EXPECT_CALL(mock, peek_byte(_)).WillOnce(DoAll(SetArgPointee<0>(0xA5), Return(true)));
|
||||
bool result = dev.peek_byte(&value);
|
||||
EXPECT_TRUE(result);
|
||||
EXPECT_EQ(value, 0xA5);
|
||||
}
|
||||
|
||||
TEST(UARTDeviceTest, PeekByteFailure) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
uint8_t value = 0;
|
||||
EXPECT_CALL(mock, peek_byte(_)).WillOnce(Return(false));
|
||||
bool result = dev.peek_byte(&value);
|
||||
EXPECT_FALSE(result);
|
||||
}
|
||||
|
||||
TEST(UARTDeviceTest, Available) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
EXPECT_CALL(mock, available()).WillOnce(Return(5));
|
||||
EXPECT_EQ(dev.available(), 5);
|
||||
}
|
||||
|
||||
TEST(UARTDeviceTest, FlushCallsParent) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
EXPECT_CALL(mock, flush()).Times(1);
|
||||
dev.flush();
|
||||
}
|
||||
|
||||
TEST(UARTDeviceTest, WriteByteForwardsToWriteArray) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
dev.write_byte(0xAB);
|
||||
EXPECT_EQ(mock.written_data.size(), 1);
|
||||
EXPECT_EQ(mock.written_data[0], 0xAB);
|
||||
}
|
||||
TEST(UARTDeviceTest, WriteArrayPointer) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
uint8_t data[3] = {1, 2, 3};
|
||||
dev.write_array(data, 3);
|
||||
EXPECT_EQ(mock.written_data.size(), 3);
|
||||
EXPECT_EQ(mock.written_data, std::vector(data, data + 3));
|
||||
}
|
||||
|
||||
TEST(UARTDeviceTest, WriteArrayVector) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
std::vector<uint8_t> data = {4, 5, 6};
|
||||
dev.write_array(data);
|
||||
EXPECT_EQ(mock.written_data, data);
|
||||
}
|
||||
|
||||
TEST(UARTDeviceTest, WriteArrayStdArray) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
std::array<uint8_t, 4> data = {7, 8, 9, 10};
|
||||
dev.write_array(data);
|
||||
EXPECT_EQ(mock.written_data.size(), data.size());
|
||||
EXPECT_EQ(mock.written_data, std::vector(data.begin(), data.end()));
|
||||
}
|
||||
|
||||
TEST(UARTDeviceTest, WriteStrForwardsToWriteArray) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
const char *str = "ESPHome";
|
||||
dev.write_str(str);
|
||||
EXPECT_EQ(mock.written_data.size(), strlen(str));
|
||||
EXPECT_EQ(0, strncmp(str, (const char *) mock.written_data.data(), mock.written_data.size()));
|
||||
}
|
||||
|
||||
TEST(UARTDeviceTest, WriteStrEmptyString) {
|
||||
MockUARTComponent mock;
|
||||
UARTDevice dev(&mock);
|
||||
const char *str = "";
|
||||
dev.write_str(str);
|
||||
EXPECT_EQ(mock.written_data.size(), 0);
|
||||
}
|
||||
|
||||
} // namespace esphome::uart::testing
|
||||
@@ -12,5 +12,8 @@ esphome:
|
||||
- logger.log: "Failed to connect to WiFi!"
|
||||
|
||||
wifi:
|
||||
ssid: MySSID
|
||||
password: password1
|
||||
networks:
|
||||
- ssid: MySSID
|
||||
password: password1
|
||||
- ssid: MySSID2
|
||||
password: password2
|
||||
|
||||
tests/integration/fixtures/host_mode_climate_basic_state.yaml (new file, 112 lines)
@@ -0,0 +1,112 @@
|
||||
esphome:
|
||||
name: host-climate-test
|
||||
host:
|
||||
api:
|
||||
logger:
|
||||
|
||||
climate:
|
||||
- platform: thermostat
|
||||
id: dual_mode_thermostat
|
||||
name: Dual-mode Thermostat
|
||||
sensor: host_thermostat_temperature_sensor
|
||||
humidity_sensor: host_thermostat_humidity_sensor
|
||||
humidity_hysteresis: 1.0
|
||||
min_cooling_off_time: 20s
|
||||
min_cooling_run_time: 20s
|
||||
max_cooling_run_time: 30s
|
||||
supplemental_cooling_delta: 3.0
|
||||
min_heating_off_time: 20s
|
||||
min_heating_run_time: 20s
|
||||
max_heating_run_time: 30s
|
||||
supplemental_heating_delta: 3.0
|
||||
min_fanning_off_time: 20s
|
||||
min_fanning_run_time: 20s
|
||||
min_idle_time: 10s
|
||||
visual:
|
||||
min_humidity: 20%
|
||||
max_humidity: 70%
|
||||
min_temperature: 15.0
|
||||
max_temperature: 32.0
|
||||
temperature_step: 0.1
|
||||
default_preset: home
|
||||
preset:
|
||||
- name: "away"
|
||||
default_target_temperature_low: 18.0
|
||||
default_target_temperature_high: 24.0
|
||||
- name: "home"
|
||||
default_target_temperature_low: 18.0
|
||||
default_target_temperature_high: 24.0
|
||||
auto_mode:
|
||||
- logger.log: "AUTO mode set"
|
||||
heat_cool_mode:
|
||||
- logger.log: "HEAT_COOL mode set"
|
||||
cool_action:
|
||||
- switch.turn_on: air_cond
|
||||
supplemental_cooling_action:
|
||||
- switch.turn_on: air_cond_2
|
||||
heat_action:
|
||||
- switch.turn_on: heater
|
||||
supplemental_heating_action:
|
||||
- switch.turn_on: heater_2
|
||||
dry_action:
|
||||
- switch.turn_on: air_cond
|
||||
fan_only_action:
|
||||
- switch.turn_on: fan_only
|
||||
idle_action:
|
||||
- switch.turn_off: air_cond
|
||||
- switch.turn_off: air_cond_2
|
||||
- switch.turn_off: heater
|
||||
- switch.turn_off: heater_2
|
||||
- switch.turn_off: fan_only
|
||||
humidity_control_humidify_action:
|
||||
- switch.turn_on: humidifier
|
||||
humidity_control_off_action:
|
||||
- switch.turn_off: humidifier
|
||||
|
||||
sensor:
|
||||
- platform: template
|
||||
id: host_thermostat_humidity_sensor
|
||||
unit_of_measurement: °C
|
||||
accuracy_decimals: 2
|
||||
state_class: measurement
|
||||
force_update: true
|
||||
lambda: return 42.0;
|
||||
update_interval: 0.1s
|
||||
- platform: template
|
||||
id: host_thermostat_temperature_sensor
|
||||
unit_of_measurement: °C
|
||||
accuracy_decimals: 2
|
||||
state_class: measurement
|
||||
force_update: true
|
||||
lambda: return 22.0;
|
||||
update_interval: 0.1s
|
||||
|
||||
switch:
|
||||
- platform: template
|
||||
id: air_cond
|
||||
name: Air Conditioner
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: air_cond_2
|
||||
name: Air Conditioner 2
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: fan_only
|
||||
name: Fan
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: heater
|
||||
name: Heater
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: heater_2
|
||||
name: Heater 2
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: dehumidifier
|
||||
name: Dehumidifier
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: humidifier
|
||||
name: Humidifier
|
||||
optimistic: true
|
||||
tests/integration/fixtures/host_mode_climate_control.yaml (new file, 108 lines)
@@ -0,0 +1,108 @@
|
||||
esphome:
|
||||
name: host-climate-test
|
||||
host:
|
||||
api:
|
||||
logger:
|
||||
|
||||
climate:
|
||||
- platform: thermostat
|
||||
id: dual_mode_thermostat
|
||||
name: Dual-mode Thermostat
|
||||
sensor: host_thermostat_temperature_sensor
|
||||
humidity_sensor: host_thermostat_humidity_sensor
|
||||
humidity_hysteresis: 1.0
|
||||
min_cooling_off_time: 20s
|
||||
min_cooling_run_time: 20s
|
||||
max_cooling_run_time: 30s
|
||||
supplemental_cooling_delta: 3.0
|
||||
min_heating_off_time: 20s
|
||||
min_heating_run_time: 20s
|
||||
max_heating_run_time: 30s
|
||||
supplemental_heating_delta: 3.0
|
||||
min_fanning_off_time: 20s
|
||||
min_fanning_run_time: 20s
|
||||
min_idle_time: 10s
|
||||
visual:
|
||||
min_humidity: 20%
|
||||
max_humidity: 70%
|
||||
min_temperature: 15.0
|
||||
max_temperature: 32.0
|
||||
temperature_step: 0.1
|
||||
default_preset: home
|
||||
preset:
|
||||
- name: "away"
|
||||
default_target_temperature_low: 18.0
|
||||
default_target_temperature_high: 24.0
|
||||
- name: "home"
|
||||
default_target_temperature_low: 18.0
|
||||
default_target_temperature_high: 24.0
|
||||
auto_mode:
|
||||
- logger.log: "AUTO mode set"
|
||||
heat_cool_mode:
|
||||
- logger.log: "HEAT_COOL mode set"
|
||||
cool_action:
|
||||
- switch.turn_on: air_cond
|
||||
supplemental_cooling_action:
|
||||
- switch.turn_on: air_cond_2
|
||||
heat_action:
|
||||
- switch.turn_on: heater
|
||||
supplemental_heating_action:
|
||||
- switch.turn_on: heater_2
|
||||
dry_action:
|
||||
- switch.turn_on: air_cond
|
||||
fan_only_action:
|
||||
- switch.turn_on: fan_only
|
||||
idle_action:
|
||||
- switch.turn_off: air_cond
|
||||
- switch.turn_off: air_cond_2
|
||||
- switch.turn_off: heater
|
||||
- switch.turn_off: heater_2
|
||||
- switch.turn_off: fan_only
|
||||
humidity_control_humidify_action:
|
||||
- switch.turn_on: humidifier
|
||||
humidity_control_off_action:
|
||||
- switch.turn_off: humidifier
|
||||
|
||||
sensor:
|
||||
- platform: template
|
||||
id: host_thermostat_humidity_sensor
|
||||
unit_of_measurement: °C
|
||||
accuracy_decimals: 2
|
||||
state_class: measurement
|
||||
force_update: true
|
||||
lambda: return 42.0;
|
||||
update_interval: 0.1s
|
||||
- platform: template
|
||||
id: host_thermostat_temperature_sensor
|
||||
unit_of_measurement: °C
|
||||
accuracy_decimals: 2
|
||||
state_class: measurement
|
||||
force_update: true
|
||||
lambda: return 22.0;
|
||||
update_interval: 0.1s
|
||||
|
||||
switch:
|
||||
- platform: template
|
||||
id: air_cond
|
||||
name: Air Conditioner
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: air_cond_2
|
||||
name: Air Conditioner 2
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: fan_only
|
||||
name: Fan
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: heater
|
||||
name: Heater
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: heater_2
|
||||
name: Heater 2
|
||||
optimistic: true
|
||||
- platform: template
|
||||
id: humidifier
|
||||
name: Humidifier
|
||||
optimistic: true
|
||||
@@ -210,7 +210,15 @@ sensor:
|
||||
name: "Test Sensor 50"
|
||||
lambda: return 50.0;
|
||||
update_interval: 0.1s
|
||||
# Temperature sensor for the thermostat
|
||||
# Sensors for the thermostat
|
||||
- platform: template
|
||||
name: "Humidity Sensor"
|
||||
id: humidity_sensor
|
||||
lambda: return 35.0;
|
||||
unit_of_measurement: "%"
|
||||
device_class: humidity
|
||||
state_class: measurement
|
||||
update_interval: 5s
|
||||
- platform: template
|
||||
name: "Temperature Sensor"
|
||||
id: temp_sensor
|
||||
@@ -295,6 +303,11 @@ valve:
|
||||
- logger.log: "Valve stopping"
|
||||
|
||||
output:
|
||||
- platform: template
|
||||
id: humidifier_output
|
||||
type: binary
|
||||
write_action:
|
||||
- logger.log: "Humidifier output changed"
|
||||
- platform: template
|
||||
id: heater_output
|
||||
type: binary
|
||||
@@ -305,18 +318,31 @@ output:
|
||||
type: binary
|
||||
write_action:
|
||||
- logger.log: "Cooler output changed"
|
||||
- platform: template
|
||||
id: fan_output
|
||||
type: binary
|
||||
write_action:
|
||||
- logger.log: "Fan output changed"
|
||||
|
||||
climate:
|
||||
- platform: thermostat
|
||||
name: "Test Thermostat"
|
||||
sensor: temp_sensor
|
||||
humidity_sensor: humidity_sensor
|
||||
default_preset: Home
|
||||
on_boot_restore_from: default_preset
|
||||
min_heating_off_time: 1s
|
||||
min_heating_run_time: 1s
|
||||
min_cooling_off_time: 1s
|
||||
min_cooling_run_time: 1s
|
||||
min_fan_mode_switching_time: 1s
|
||||
min_idle_time: 1s
|
||||
visual:
|
||||
min_humidity: 20%
|
||||
max_humidity: 70%
|
||||
min_temperature: 15.0
|
||||
max_temperature: 32.0
|
||||
temperature_step: 0.1
|
||||
heat_action:
|
||||
- output.turn_on: heater_output
|
||||
cool_action:
|
||||
@@ -324,6 +350,14 @@ climate:
|
||||
idle_action:
|
||||
- output.turn_off: heater_output
|
||||
- output.turn_off: cooler_output
|
||||
humidity_control_humidify_action:
|
||||
- output.turn_on: humidifier_output
|
||||
humidity_control_off_action:
|
||||
- output.turn_off: humidifier_output
|
||||
fan_mode_auto_action:
|
||||
- output.turn_off: fan_output
|
||||
fan_mode_on_action:
|
||||
- output.turn_on: fan_output
|
||||
preset:
|
||||
- name: Home
|
||||
default_target_temperature_low: 20
|
||||
|
||||
@@ -0,0 +1,10 @@
|
||||
esphome:
|
||||
name: noise-key-test
|
||||
|
||||
host:
|
||||
|
||||
api:
|
||||
encryption:
|
||||
key: "zX9/JHxMKwpP0jUGsF0iESCm1wRvNgR6NkKVOhn7kSs="
|
||||
|
||||
logger:
|
||||
tests/integration/test_host_mode_climate_basic_state.py (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
"""Integration test for Host mode with climate."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
import aioesphomeapi
|
||||
from aioesphomeapi import ClimateAction, ClimateMode, ClimatePreset, EntityState
|
||||
import pytest
|
||||
|
||||
from .types import APIClientConnectedFactory, RunCompiledFunction
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_host_mode_climate_basic_state(
|
||||
yaml_config: str,
|
||||
run_compiled: RunCompiledFunction,
|
||||
api_client_connected: APIClientConnectedFactory,
|
||||
) -> None:
|
||||
"""Test basic climate state reporting."""
|
||||
loop = asyncio.get_running_loop()
|
||||
async with run_compiled(yaml_config), api_client_connected() as client:
|
||||
states: dict[int, EntityState] = {}
|
||||
climate_future: asyncio.Future[EntityState] = loop.create_future()
|
||||
|
||||
def on_state(state: EntityState) -> None:
|
||||
states[state.key] = state
|
||||
if (
|
||||
isinstance(state, aioesphomeapi.ClimateState)
|
||||
and not climate_future.done()
|
||||
):
|
||||
climate_future.set_result(state)
|
||||
|
||||
client.subscribe_states(on_state)
|
||||
|
||||
try:
|
||||
climate_state = await asyncio.wait_for(climate_future, timeout=5.0)
|
||||
except TimeoutError:
|
||||
pytest.fail("Climate state not received within 5 seconds")
|
||||
|
||||
assert isinstance(climate_state, aioesphomeapi.ClimateState)
|
||||
assert climate_state.mode == ClimateMode.OFF
|
||||
assert climate_state.action == ClimateAction.OFF
|
||||
assert climate_state.current_temperature == 22.0
|
||||
assert climate_state.target_temperature_low == 18.0
|
||||
assert climate_state.target_temperature_high == 24.0
|
||||
assert climate_state.preset == ClimatePreset.HOME
|
||||
assert climate_state.current_humidity == 42.0
|
||||
assert climate_state.target_humidity == 20.0
|
||||
tests/integration/test_host_mode_climate_control.py (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
"""Integration test for Host mode with climate."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
import aioesphomeapi
|
||||
from aioesphomeapi import ClimateInfo, ClimateMode, EntityState
|
||||
import pytest
|
||||
|
||||
from .state_utils import InitialStateHelper
|
||||
from .types import APIClientConnectedFactory, RunCompiledFunction
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_host_mode_climate_control(
|
||||
yaml_config: str,
|
||||
run_compiled: RunCompiledFunction,
|
||||
api_client_connected: APIClientConnectedFactory,
|
||||
) -> None:
|
||||
"""Test climate mode control."""
|
||||
loop = asyncio.get_running_loop()
|
||||
async with run_compiled(yaml_config), api_client_connected() as client:
|
||||
states: dict[int, EntityState] = {}
|
||||
climate_future: asyncio.Future[EntityState] = loop.create_future()
|
||||
|
||||
def on_state(state: EntityState) -> None:
|
||||
states[state.key] = state
|
||||
if (
|
||||
isinstance(state, aioesphomeapi.ClimateState)
|
||||
and state.mode == ClimateMode.HEAT
|
||||
and state.target_temperature_low == 21.5
|
||||
and state.target_temperature_high == 26.5
|
||||
and not climate_future.done()
|
||||
):
|
||||
climate_future.set_result(state)
|
||||
|
||||
# Get entities and set up state synchronization
|
||||
entities, services = await client.list_entities_services()
|
||||
initial_state_helper = InitialStateHelper(entities)
|
||||
climate_infos = [e for e in entities if isinstance(e, ClimateInfo)]
|
||||
assert len(climate_infos) >= 1, "Expected at least 1 climate entity"
|
||||
|
||||
# Subscribe with the wrapper that filters initial states
|
||||
client.subscribe_states(initial_state_helper.on_state_wrapper(on_state))
|
||||
|
||||
# Wait for all initial states to be broadcast
|
||||
try:
|
||||
await initial_state_helper.wait_for_initial_states()
|
||||
except TimeoutError:
|
||||
pytest.fail("Timeout waiting for initial states")
|
||||
|
||||
test_climate = next(
|
||||
(c for c in climate_infos if c.name == "Dual-mode Thermostat"), None
|
||||
)
|
||||
assert test_climate is not None, (
|
||||
"Dual-mode Thermostat thermostat climate not found"
|
||||
)
|
||||
|
||||
# Adjust setpoints
|
||||
client.climate_command(
|
||||
test_climate.key,
|
||||
mode=ClimateMode.HEAT,
|
||||
target_temperature_low=21.5,
|
||||
target_temperature_high=26.5,
|
||||
)
|
||||
|
||||
try:
|
||||
climate_state = await asyncio.wait_for(climate_future, timeout=5.0)
|
||||
except TimeoutError:
|
||||
pytest.fail("Climate state not received within 5 seconds")
|
||||
|
||||
assert isinstance(climate_state, aioesphomeapi.ClimateState)
|
||||
assert climate_state.mode == ClimateMode.HEAT
|
||||
assert climate_state.target_temperature_low == 21.5
|
||||
assert climate_state.target_temperature_high == 26.5
|
||||
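The future-plus-predicate pattern above (subscribe, match on mode and setpoints, resolve a future) recurs across these integration tests. A minimal sketch of how it could be factored into a shared helper follows; the helper name and predicate signature are assumptions, not part of this change:

    import asyncio
    from collections.abc import Callable

    import aioesphomeapi
    from aioesphomeapi import EntityState


    async def wait_for_climate_state(
        client: aioesphomeapi.APIClient,
        predicate: Callable[[aioesphomeapi.ClimateState], bool],
        timeout: float = 5.0,
    ) -> aioesphomeapi.ClimateState:
        """Return the first ClimateState that satisfies predicate, or time out."""
        loop = asyncio.get_running_loop()
        future: asyncio.Future[aioesphomeapi.ClimateState] = loop.create_future()

        def on_state(state: EntityState) -> None:
            # Resolve once with the first matching climate state
            if (
                isinstance(state, aioesphomeapi.ClimateState)
                and predicate(state)
                and not future.done()
            ):
                future.set_result(state)

        client.subscribe_states(on_state)
        return await asyncio.wait_for(future, timeout)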
@@ -5,7 +5,10 @@ from __future__ import annotations
import asyncio

from aioesphomeapi import (
    ClimateFanMode,
    ClimateFeature,
    ClimateInfo,
    ClimateMode,
    DateInfo,
    DateState,
    DateTimeInfo,
@@ -121,6 +124,46 @@ async def test_host_mode_many_entities(
        assert len(climate_infos) >= 1, "Expected at least 1 climate entity"

        climate_info = climate_infos[0]

        # Verify feature flags set as expected
        assert climate_info.feature_flags == (
            ClimateFeature.SUPPORTS_ACTION
            | ClimateFeature.SUPPORTS_CURRENT_HUMIDITY
            | ClimateFeature.SUPPORTS_CURRENT_TEMPERATURE
            | ClimateFeature.SUPPORTS_TWO_POINT_TARGET_TEMPERATURE
            | ClimateFeature.SUPPORTS_TARGET_HUMIDITY
        )

        # Verify modes
        assert climate_info.supported_modes == [
            ClimateMode.OFF,
            ClimateMode.COOL,
            ClimateMode.HEAT,
        ], f"Expected modes [OFF, COOL, HEAT], got {climate_info.supported_modes}"

        # Verify visual parameters
        assert climate_info.visual_min_temperature == 15.0, (
            f"Expected min_temperature=15.0, got {climate_info.visual_min_temperature}"
        )
        assert climate_info.visual_max_temperature == 32.0, (
            f"Expected max_temperature=32.0, got {climate_info.visual_max_temperature}"
        )
        assert climate_info.visual_target_temperature_step == 0.1, (
            f"Expected temperature_step=0.1, got {climate_info.visual_target_temperature_step}"
        )
        assert climate_info.visual_min_humidity == 20.0, (
            f"Expected min_humidity=20.0, got {climate_info.visual_min_humidity}"
        )
        assert climate_info.visual_max_humidity == 70.0, (
            f"Expected max_humidity=70.0, got {climate_info.visual_max_humidity}"
        )

        # Verify fan modes
        assert climate_info.supported_fan_modes == [
            ClimateFanMode.ON,
            ClimateFanMode.AUTO,
        ], f"Expected fan modes [ON, AUTO], got {climate_info.supported_fan_modes}"

        # Verify the thermostat has presets
        assert len(climate_info.supported_presets) > 0, (
            "Expected climate to have presets"
        )

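The feature_flags comparison above treats the field as a complete bitmask. Because the flags combine with bitwise OR, individual capabilities can also be probed one at a time with bitwise AND; a minimal sketch, assuming a climate_info object as in the test:

    from aioesphomeapi import ClimateFeature

    # True when the entity reports its current action (heating/cooling/idle)
    supports_action = bool(climate_info.feature_flags & ClimateFeature.SUPPORTS_ACTION)

    # True when the entity exposes separate low/high target temperatures
    supports_two_point = bool(
        climate_info.feature_flags
        & ClimateFeature.SUPPORTS_TWO_POINT_TARGET_TEMPERATURE
    )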
@@ -49,3 +49,42 @@ async def test_noise_encryption_key_protection(
        with pytest.raises(InvalidEncryptionKeyAPIError):
            async with api_client_connected(noise_psk=wrong_key) as client:
                await client.device_info()


@pytest.mark.asyncio
async def test_noise_encryption_key_clear_protection(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test that noise encryption key set in YAML cannot be changed via API."""
    # The key that's set in the YAML fixture
    noise_psk = "zX9/JHxMKwpP0jUGsF0iESCm1wRvNgR6NkKVOhn7kSs="

    # Keep ESPHome process running throughout all tests
    async with run_compiled(yaml_config):
        # First connection - test key change attempt
        async with api_client_connected(noise_psk=noise_psk) as client:
            # Verify connection is established
            device_info = await client.device_info()
            assert device_info is not None

            # Try to set a new encryption key via API
            new_key = b""  # Empty key to attempt to clear

            # This should fail since key was set in YAML
            success = await client.noise_encryption_set_key(new_key)
            assert success is False

        # Reconnect with the original key to verify it still works
        async with api_client_connected(noise_psk=noise_psk) as client:
            # Verify connection is still successful with original key
            device_info = await client.device_info()
            assert device_info is not None
            assert device_info.name == "noise-key-test"

        # Verify that connecting with a wrong key fails
        wrong_key = base64.b64encode(b"y" * 32).decode()  # Different key
        with pytest.raises(InvalidEncryptionKeyAPIError):
            async with api_client_connected(noise_psk=wrong_key) as client:
                await client.device_info()

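The noise PSK in these tests is a base64 encoding of exactly 32 raw bytes, which is why the deliberately wrong key is built from b"y" * 32. A structurally valid random key can be produced the same way; a small sketch, not part of this change:

    import base64
    import secrets

    # 32 random bytes, base64-encoded - the shape a noise encryption key must have
    random_psk = base64.b64encode(secrets.token_bytes(32)).decode()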
@@ -5,7 +5,6 @@ import importlib.util
import json
import os
from pathlib import Path
import subprocess
import sys
from unittest.mock import Mock, call, patch

@@ -56,9 +55,9 @@ def mock_should_run_python_linters() -> Generator[Mock, None, None]:


@pytest.fixture
def mock_subprocess_run() -> Generator[Mock, None, None]:
    """Mock subprocess.run for list-components.py calls."""
    with patch.object(determine_jobs.subprocess, "run") as mock:
def mock_determine_cpp_unit_tests() -> Generator[Mock, None, None]:
    """Mock determine_cpp_unit_tests from helpers."""
    with patch.object(determine_jobs, "determine_cpp_unit_tests") as mock:
        yield mock


@@ -82,8 +81,8 @@ def test_main_all_tests_should_run(
    mock_should_run_clang_tidy: Mock,
    mock_should_run_clang_format: Mock,
    mock_should_run_python_linters: Mock,
    mock_subprocess_run: Mock,
    mock_changed_files: Mock,
    mock_determine_cpp_unit_tests: Mock,
    capsys: pytest.CaptureFixture[str],
    monkeypatch: pytest.MonkeyPatch,
) -> None:
@@ -95,18 +94,36 @@ def test_main_all_tests_should_run(
    mock_should_run_clang_tidy.return_value = True
    mock_should_run_clang_format.return_value = True
    mock_should_run_python_linters.return_value = True
    mock_determine_cpp_unit_tests.return_value = (False, ["wifi", "api", "sensor"])

    # Mock list-components.py output (now returns JSON with --changed-with-deps)
    mock_result = Mock()
    mock_result.stdout = json.dumps(
        {"directly_changed": ["wifi", "api"], "all_changed": ["wifi", "api", "sensor"]}
    )
    mock_subprocess_run.return_value = mock_result
    # Mock changed_files to return non-component files (to avoid memory impact)
    # Memory impact only runs when component C++ files change
    mock_changed_files.return_value = [
        "esphome/config.py",
        "esphome/helpers.py",
    ]

    # Run main function with mocked argv
    with (
        patch("sys.argv", ["determine-jobs.py"]),
        patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
        patch.object(
            determine_jobs,
            "get_changed_components",
            return_value=["wifi", "api", "sensor"],
        ),
        patch.object(
            determine_jobs,
            "filter_component_and_test_files",
            side_effect=lambda f: f.startswith("esphome/components/"),
        ),
        patch.object(
            determine_jobs,
            "get_components_with_dependencies",
            side_effect=lambda files, deps: (
                ["wifi", "api"] if not deps else ["wifi", "api", "sensor"]
            ),
        ),
    ):
        determine_jobs.main()

@@ -130,9 +147,11 @@ def test_main_all_tests_should_run(
    # changed_cpp_file_count should be present
    assert "changed_cpp_file_count" in output
    assert isinstance(output["changed_cpp_file_count"], int)
    # memory_impact should be present
    # memory_impact should be false (no component C++ files changed)
    assert "memory_impact" in output
    assert output["memory_impact"]["should_run"] == "false"  # No files changed
    assert output["memory_impact"]["should_run"] == "false"
    assert output["cpp_unit_tests_run_all"] is False
    assert output["cpp_unit_tests_components"] == ["wifi", "api", "sensor"]


def test_main_no_tests_should_run(
@@ -140,8 +159,8 @@ def test_main_no_tests_should_run(
    mock_should_run_clang_tidy: Mock,
    mock_should_run_clang_format: Mock,
    mock_should_run_python_linters: Mock,
    mock_subprocess_run: Mock,
    mock_changed_files: Mock,
    mock_determine_cpp_unit_tests: Mock,
    capsys: pytest.CaptureFixture[str],
    monkeypatch: pytest.MonkeyPatch,
) -> None:
@@ -153,14 +172,22 @@ def test_main_no_tests_should_run(
    mock_should_run_clang_tidy.return_value = False
    mock_should_run_clang_format.return_value = False
    mock_should_run_python_linters.return_value = False
    mock_determine_cpp_unit_tests.return_value = (False, [])

    # Mock empty list-components.py output
    mock_result = Mock()
    mock_result.stdout = json.dumps({"directly_changed": [], "all_changed": []})
    mock_subprocess_run.return_value = mock_result
    # Mock changed_files to return no component files
    mock_changed_files.return_value = []

    # Run main function with mocked argv
    with patch("sys.argv", ["determine-jobs.py"]):
    with (
        patch("sys.argv", ["determine-jobs.py"]),
        patch.object(determine_jobs, "get_changed_components", return_value=[]),
        patch.object(
            determine_jobs, "filter_component_and_test_files", return_value=False
        ),
        patch.object(
            determine_jobs, "get_components_with_dependencies", return_value=[]
        ),
    ):
        determine_jobs.main()

    # Check output
@@ -180,31 +207,8 @@ def test_main_no_tests_should_run(
    # memory_impact should be present
    assert "memory_impact" in output
    assert output["memory_impact"]["should_run"] == "false"


def test_main_list_components_fails(
    mock_should_run_integration_tests: Mock,
    mock_should_run_clang_tidy: Mock,
    mock_should_run_clang_format: Mock,
    mock_should_run_python_linters: Mock,
    mock_subprocess_run: Mock,
    capsys: pytest.CaptureFixture[str],
) -> None:
    """Test when list-components.py fails."""
    mock_should_run_integration_tests.return_value = True
    mock_should_run_clang_tidy.return_value = True
    mock_should_run_clang_format.return_value = True
    mock_should_run_python_linters.return_value = True

    # Mock list-components.py failure
    mock_subprocess_run.side_effect = subprocess.CalledProcessError(1, "cmd")

    # Run main function with mocked argv - should raise
    with (
        patch("sys.argv", ["determine-jobs.py"]),
        pytest.raises(subprocess.CalledProcessError),
    ):
        determine_jobs.main()
    assert output["cpp_unit_tests_run_all"] is False
    assert output["cpp_unit_tests_components"] == []


def test_main_with_branch_argument(
@@ -212,8 +216,8 @@ def test_main_with_branch_argument(
    mock_should_run_clang_tidy: Mock,
    mock_should_run_clang_format: Mock,
    mock_should_run_python_linters: Mock,
    mock_subprocess_run: Mock,
    mock_changed_files: Mock,
    mock_determine_cpp_unit_tests: Mock,
    capsys: pytest.CaptureFixture[str],
    monkeypatch: pytest.MonkeyPatch,
) -> None:
@@ -225,17 +229,24 @@ def test_main_with_branch_argument(
    mock_should_run_clang_tidy.return_value = True
    mock_should_run_clang_format.return_value = False
    mock_should_run_python_linters.return_value = True
    mock_determine_cpp_unit_tests.return_value = (False, ["mqtt"])

    # Mock list-components.py output
    mock_result = Mock()
    mock_result.stdout = json.dumps(
        {"directly_changed": ["mqtt"], "all_changed": ["mqtt"]}
    )
    mock_subprocess_run.return_value = mock_result
    # Mock changed_files to return non-component files (to avoid memory impact)
    # Memory impact only runs when component C++ files change
    mock_changed_files.return_value = ["esphome/config.py"]

    with (
        patch("sys.argv", ["script.py", "-b", "main"]),
        patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
        patch.object(determine_jobs, "get_changed_components", return_value=["mqtt"]),
        patch.object(
            determine_jobs,
            "filter_component_and_test_files",
            side_effect=lambda f: f.startswith("esphome/components/"),
        ),
        patch.object(
            determine_jobs, "get_components_with_dependencies", return_value=["mqtt"]
        ),
    ):
        determine_jobs.main()

@@ -245,13 +256,6 @@ def test_main_with_branch_argument(
    mock_should_run_clang_format.assert_called_once_with("main")
    mock_should_run_python_linters.assert_called_once_with("main")

    # Check that list-components.py was called with branch
    mock_subprocess_run.assert_called_once()
    call_args = mock_subprocess_run.call_args[0][0]
    assert "--changed-with-deps" in call_args
    assert "-b" in call_args
    assert "main" in call_args

    # Check output
    captured = capsys.readouterr()
    output = json.loads(captured.out)
@@ -272,9 +276,11 @@ def test_main_with_branch_argument(
    # changed_cpp_file_count should be present
    assert "changed_cpp_file_count" in output
    assert isinstance(output["changed_cpp_file_count"], int)
    # memory_impact should be present
    # memory_impact should be false (no component C++ files changed)
    assert "memory_impact" in output
    assert output["memory_impact"]["should_run"] == "false"
    assert output["cpp_unit_tests_run_all"] is False
    assert output["cpp_unit_tests_components"] == ["mqtt"]


def test_should_run_integration_tests(
@@ -485,7 +491,6 @@ def test_main_filters_components_without_tests(
    mock_should_run_clang_tidy: Mock,
    mock_should_run_clang_format: Mock,
    mock_should_run_python_linters: Mock,
    mock_subprocess_run: Mock,
    mock_changed_files: Mock,
    capsys: pytest.CaptureFixture[str],
    tmp_path: Path,
@@ -500,16 +505,11 @@ def test_main_filters_components_without_tests(
    mock_should_run_clang_format.return_value = False
    mock_should_run_python_linters.return_value = False

    # Mock list-components.py output with 3 components
    # wifi: has tests, sensor: has tests, airthings_ble: no tests
    mock_result = Mock()
    mock_result.stdout = json.dumps(
        {
            "directly_changed": ["wifi", "sensor"],
            "all_changed": ["wifi", "sensor", "airthings_ble"],
        }
    )
    mock_subprocess_run.return_value = mock_result
    # Mock changed_files to return component files
    mock_changed_files.return_value = [
        "esphome/components/wifi/wifi.cpp",
        "esphome/components/sensor/sensor.h",
    ]

    # Create test directory structure
    tests_dir = tmp_path / "tests" / "components"
@@ -533,6 +533,24 @@ def test_main_filters_components_without_tests(
        patch.object(determine_jobs, "root_path", str(tmp_path)),
        patch.object(helpers, "root_path", str(tmp_path)),
        patch("sys.argv", ["determine-jobs.py"]),
        patch.object(
            determine_jobs,
            "get_changed_components",
            return_value=["wifi", "sensor", "airthings_ble"],
        ),
        patch.object(
            determine_jobs,
            "filter_component_and_test_files",
            side_effect=lambda f: f.startswith("esphome/components/"),
        ),
        patch.object(
            determine_jobs,
            "get_components_with_dependencies",
            side_effect=lambda files, deps: (
                ["wifi", "sensor"] if not deps else ["wifi", "sensor", "airthings_ble"]
            ),
        ),
        patch.object(determine_jobs, "changed_files", return_value=[]),
    ):
        # Clear the cache since we're mocking root_path
        determine_jobs._component_has_tests.cache_clear()
@@ -775,7 +793,6 @@ def test_clang_tidy_mode_full_scan(
    mock_should_run_clang_tidy: Mock,
    mock_should_run_clang_format: Mock,
    mock_should_run_python_linters: Mock,
    mock_subprocess_run: Mock,
    mock_changed_files: Mock,
    capsys: pytest.CaptureFixture[str],
    monkeypatch: pytest.MonkeyPatch,
@@ -788,15 +805,20 @@ def test_clang_tidy_mode_full_scan(
    mock_should_run_clang_format.return_value = False
    mock_should_run_python_linters.return_value = False

    # Mock list-components.py output
    mock_result = Mock()
    mock_result.stdout = json.dumps({"directly_changed": [], "all_changed": []})
    mock_subprocess_run.return_value = mock_result
    # Mock changed_files to return no component files
    mock_changed_files.return_value = []

    # Mock full scan (hash changed)
    with (
        patch("sys.argv", ["determine-jobs.py"]),
        patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=True),
        patch.object(determine_jobs, "get_changed_components", return_value=[]),
        patch.object(
            determine_jobs, "filter_component_and_test_files", return_value=False
        ),
        patch.object(
            determine_jobs, "get_components_with_dependencies", return_value=[]
        ),
    ):
        determine_jobs.main()

@@ -837,7 +859,6 @@ def test_clang_tidy_mode_targeted_scan(
    mock_should_run_clang_tidy: Mock,
    mock_should_run_clang_format: Mock,
    mock_should_run_python_linters: Mock,
    mock_subprocess_run: Mock,
    mock_changed_files: Mock,
    capsys: pytest.CaptureFixture[str],
    monkeypatch: pytest.MonkeyPatch,
@@ -853,12 +874,10 @@ def test_clang_tidy_mode_targeted_scan(
    # Create component names
    components = [f"comp{i}" for i in range(component_count)]

    # Mock list-components.py output
    mock_result = Mock()
    mock_result.stdout = json.dumps(
        {"directly_changed": components, "all_changed": components}
    )
    mock_subprocess_run.return_value = mock_result
    # Mock changed_files to return component files
    mock_changed_files.return_value = [
        f"esphome/components/{comp}/file.cpp" for comp in components
    ]

    # Mock git_ls_files to return files for each component
    cpp_files = {
@@ -875,6 +894,15 @@ def test_clang_tidy_mode_targeted_scan(
        patch("sys.argv", ["determine-jobs.py"]),
        patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
        patch.object(determine_jobs, "git_ls_files", side_effect=mock_git_ls_files),
        patch.object(determine_jobs, "get_changed_components", return_value=components),
        patch.object(
            determine_jobs,
            "filter_component_and_test_files",
            side_effect=lambda f: f.startswith("esphome/components/"),
        ),
        patch.object(
            determine_jobs, "get_components_with_dependencies", return_value=components
        ),
    ):
        determine_jobs.main()

@@ -882,3 +910,60 @@ def test_clang_tidy_mode_targeted_scan(
    output = json.loads(captured.out)

    assert output["clang_tidy_mode"] == expected_mode


def test_main_core_files_changed_still_detects_components(
    mock_should_run_integration_tests: Mock,
    mock_should_run_clang_tidy: Mock,
    mock_should_run_clang_format: Mock,
    mock_should_run_python_linters: Mock,
    mock_changed_files: Mock,
    mock_determine_cpp_unit_tests: Mock,
    capsys: pytest.CaptureFixture[str],
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Test that component changes are detected even when core files change."""
    monkeypatch.delenv("GITHUB_ACTIONS", raising=False)

    mock_should_run_integration_tests.return_value = True
    mock_should_run_clang_tidy.return_value = True
    mock_should_run_clang_format.return_value = True
    mock_should_run_python_linters.return_value = True
    mock_determine_cpp_unit_tests.return_value = (True, [])

    mock_changed_files.return_value = [
        "esphome/core/helpers.h",
        "esphome/components/select/select_traits.h",
        "esphome/components/select/select_traits.cpp",
        "esphome/components/api/api.proto",
    ]

    with (
        patch("sys.argv", ["determine-jobs.py"]),
        patch.object(determine_jobs, "_is_clang_tidy_full_scan", return_value=False),
        patch.object(determine_jobs, "get_changed_components", return_value=None),
        patch.object(
            determine_jobs,
            "filter_component_and_test_files",
            side_effect=lambda f: f.startswith("esphome/components/"),
        ),
        patch.object(
            determine_jobs,
            "get_components_with_dependencies",
            side_effect=lambda files, deps: (
                ["select", "api"]
                if not deps
                else ["select", "api", "bluetooth_proxy", "logger"]
            ),
        ),
    ):
        determine_jobs.main()

    captured = capsys.readouterr()
    output = json.loads(captured.out)

    assert output["clang_tidy"] is True
    assert output["clang_tidy_mode"] == "split"
    assert "select" in output["changed_components"]
    assert "api" in output["changed_components"]
    assert len(output["changed_components"]) > 0
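Taken together, these tests pin down a small contract: determine_cpp_unit_tests returns a (run_all, components) tuple, and main() surfaces it as the cpp_unit_tests_run_all and cpp_unit_tests_components keys of its JSON output. A minimal sketch of that shape, reusing the mocked values from test_main_all_tests_should_run (illustrative only, not the actual implementation in determine-jobs.py):

    import json


    def build_cpp_unit_test_output(run_all: bool, components: list[str]) -> str:
        """Fold the (run_all, components) tuple into the JSON job description."""
        return json.dumps(
            {
                "cpp_unit_tests_run_all": run_all,        # True => run every C++ unit test
                "cpp_unit_tests_components": components,  # targeted per-component list
            }
        )


    print(build_cpp_unit_test_output(False, ["wifi", "api", "sensor"]))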
@@ -3,7 +3,7 @@ esphome:
  friendly_name: $component_name

esp8266:
  board: d1_mini
  board: d1_mini_pro

logger:
  level: VERY_VERBOSE

@@ -517,6 +517,35 @@ def test_include_file_cpp(tmp_path: Path, mock_copy_file_if_changed: Mock) -> No
    mock_cg.add_global.assert_not_called()


def test_include_file_with_c_header(
    tmp_path: Path, mock_copy_file_if_changed: Mock
) -> None:
    """Test include_file wraps header in extern C block when is_c_header is True."""
    src_file = tmp_path / "c_library.h"
    src_file.write_text("// C library header")

    CORE.build_path = tmp_path / "build"

    with patch("esphome.core.config.cg") as mock_cg:
        # Mock RawStatement to capture the text
        mock_raw_statement = MagicMock()
        mock_raw_statement.text = ""

        def raw_statement_side_effect(text):
            mock_raw_statement.text = text
            return mock_raw_statement

        mock_cg.RawStatement.side_effect = raw_statement_side_effect

        config.include_file(src_file, Path("c_library.h"), is_c_header=True)

        mock_copy_file_if_changed.assert_called_once()
        mock_cg.add_global.assert_called_once()
        # Check that include statement is wrapped in extern "C" block
        assert 'extern "C"' in mock_raw_statement.text
        assert '#include "c_library.h"' in mock_raw_statement.text

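The two assertions at the end of the new test only require that the generated global statement contains an extern "C" block and the quoted include. A string satisfying them would look roughly like the sketch below (a hypothetical reconstruction; the real text is produced by include_file):

    # Hypothetical shape of the statement passed to cg.RawStatement
    include_text = 'extern "C" {\n#include "c_library.h"\n}\n'

    assert 'extern "C"' in include_text
    assert '#include "c_library.h"' in include_text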
def test_get_usable_cpu_count() -> None:
    """Test get_usable_cpu_count returns CPU count."""
    count = config.get_usable_cpu_count()