Mirror of https://github.com/esphome/esphome.git (synced 2025-08-10 20:29:24 +00:00)
Merge branch 'integration' into memory_api
@@ -391,8 +391,7 @@ async def build_action(full_config, template_arg, args):
     )
     action_id = full_config[CONF_TYPE_ID]
     builder = registry_entry.coroutine_fun
-    ret = await builder(config, action_id, template_arg, args)
-    return ret
+    return await builder(config, action_id, template_arg, args)


 async def build_action_list(config, templ, arg_type):
@@ -409,8 +408,7 @@ async def build_condition(full_config, template_arg, args):
     )
     action_id = full_config[CONF_TYPE_ID]
     builder = registry_entry.coroutine_fun
-    ret = await builder(config, action_id, template_arg, args)
-    return ret
+    return await builder(config, action_id, template_arg, args)


 async def build_condition_list(config, templ, args):

@@ -301,8 +301,7 @@ async def alarm_action_disarm_to_code(config, action_id, template_arg, args):
 )
 async def alarm_action_pending_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)


 @automation.register_action(
@@ -310,8 +309,7 @@ async def alarm_action_pending_to_code(config, action_id, template_arg, args):
 )
 async def alarm_action_trigger_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)


 @automation.register_action(
@@ -319,8 +317,7 @@ async def alarm_action_trigger_to_code(config, action_id, template_arg, args):
 )
 async def alarm_action_chime_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)


 @automation.register_action(
@@ -333,8 +330,7 @@ async def alarm_action_chime_to_code(config, action_id, template_arg, args):
 )
 async def alarm_action_ready_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)


 @automation.register_condition(

@@ -175,8 +175,7 @@ BLE_REMOVE_BOND_ACTION_SCHEMA = cv.Schema(
 )
 async def ble_disconnect_to_code(config, action_id, template_arg, args):
     parent = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, parent)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, parent)


 @automation.register_action(
@@ -184,8 +183,7 @@ async def ble_disconnect_to_code(config, action_id, template_arg, args):
 )
 async def ble_connect_to_code(config, action_id, template_arg, args):
     parent = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, parent)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, parent)


 @automation.register_action(
@@ -282,9 +280,7 @@ async def passkey_reply_to_code(config, action_id, template_arg, args):
 )
 async def remove_bond_to_code(config, action_id, template_arg, args):
     parent = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, parent)
-
-    return var
+    return cg.new_Pvariable(action_id, template_arg, parent)


 async def to_code(config):

@@ -892,7 +892,7 @@ def get_arduino_partition_csv(flash_size):
     eeprom_partition_start = app1_partition_start + app_partition_size
     spiffs_partition_start = eeprom_partition_start + eeprom_partition_size

-    partition_csv = f"""\
+    return f"""\
 nvs, data, nvs, 0x9000, 0x5000,
 otadata, data, ota, 0xE000, 0x2000,
 app0, app, ota_0, 0x{app0_partition_start:X}, 0x{app_partition_size:X},
@@ -900,20 +900,18 @@ app1, app, ota_1, 0x{app1_partition_start:X}, 0x{app_partition_size:X},
 eeprom, data, 0x99, 0x{eeprom_partition_start:X}, 0x{eeprom_partition_size:X},
 spiffs, data, spiffs, 0x{spiffs_partition_start:X}, 0x{spiffs_partition_size:X}
 """
-    return partition_csv


 def get_idf_partition_csv(flash_size):
     app_partition_size = APP_PARTITION_SIZES[flash_size]

-    partition_csv = f"""\
+    return f"""\
 otadata, data, ota, , 0x2000,
 phy_init, data, phy, , 0x1000,
 app0, app, ota_0, , 0x{app_partition_size:X},
 app1, app, ota_1, , 0x{app_partition_size:X},
 nvs, data, nvs, , 0x6D000,
 """
-    return partition_csv


 def _format_sdkconfig_val(value: SdkconfigValueType) -> str:

@@ -187,8 +187,7 @@ def validate_supports(value):
             "Open-drain only works with output mode", [CONF_MODE, CONF_OPEN_DRAIN]
         )

-    value = _esp32_validations[variant].usage_validation(value)
-    return value
+    return _esp32_validations[variant].usage_validation(value)


 # https://docs.espressif.com/projects/esp-idf/en/v3.3.5/api-reference/peripherals/gpio.html#_CPPv416gpio_drive_cap_t

@@ -628,5 +628,4 @@ async def ble_server_descriptor_set_value(config, action_id, template_arg, args)
 )
 async def ble_server_characteristic_notify(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)

@@ -330,8 +330,7 @@ HAIER_HON_BASE_ACTION_SCHEMA = automation.maybe_simple_id(
 )
 async def display_action_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)


 @automation.register_action(
@@ -342,8 +341,7 @@ async def display_action_to_code(config, action_id, template_arg, args):
 )
 async def beeper_action_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)


 # Start self cleaning or steri-cleaning action action
@@ -359,8 +357,7 @@ async def beeper_action_to_code(config, action_id, template_arg, args):
 )
 async def start_cleaning_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)


 # Set vertical airflow direction action
@@ -417,8 +414,7 @@ async def haier_set_horizontal_airflow_to_code(config, action_id, template_arg,
 )
 async def health_action_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)


 @automation.register_action(
@@ -432,8 +428,7 @@ async def health_action_to_code(config, action_id, template_arg, args):
 )
 async def power_action_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)


 def _final_validate(config):

@@ -353,10 +353,9 @@ async def addressable_lambda_effect_to_code(config, effect_id):
         (bool, "initial_run"),
     ]
     lambda_ = await cg.process_lambda(config[CONF_LAMBDA], args, return_type=cg.void)
-    var = cg.new_Pvariable(
+    return cg.new_Pvariable(
         effect_id, config[CONF_NAME], lambda_, config[CONF_UPDATE_INTERVAL]
     )
-    return var


 @register_addressable_effect(

@@ -85,8 +85,7 @@ async def action_to_code(
     async with LambdaContext(parameters=args, where=action_id) as context:
         for widget in widgets:
             await action(widget)
-    var = cg.new_Pvariable(action_id, template_arg, await context.get_lambda())
-    return var
+    return cg.new_Pvariable(action_id, template_arg, await context.get_lambda())


 async def update_to_code(config, action_id, template_arg, args):
@@ -354,8 +353,7 @@ async def widget_focus(config, action_id, template_arg, args):

         if config[CONF_FREEZE]:
             lv.group_focus_freeze(group, True)
-    var = cg.new_Pvariable(action_id, template_arg, await context.get_lambda())
-    return var
+    return cg.new_Pvariable(action_id, template_arg, await context.get_lambda())


 @automation.register_action(

@@ -271,8 +271,7 @@ padding = LValidator(padding_validator, int32, retmapper=literal)


 def zoom_validator(value):
-    value = cv.float_range(0.1, 10.0)(value)
-    return value
+    return cv.float_range(0.1, 10.0)(value)


 def zoom_retmapper(value):

@@ -66,8 +66,7 @@ async def style_update_to_code(config, action_id, template_arg, args):
     async with LambdaContext(parameters=args, where=action_id) as context:
         await style_set(style, config)

-    var = cg.new_Pvariable(action_id, template_arg, await context.get_lambda())
-    return var
+    return cg.new_Pvariable(action_id, template_arg, await context.get_lambda())


 async def theme_to_code(config):

@@ -189,7 +189,7 @@ class Widget:
         for matrix buttons
         :return:
         """
-        return None
+        return

     def get_max(self):
         return self.type.get_max(self.config)

@@ -193,7 +193,7 @@ class ButtonMatrixType(WidgetType):
     async def to_code(self, w: Widget, config):
         lvgl_components_required.add("BUTTONMATRIX")
         if CONF_ROWS not in config:
-            return []
+            return
         text_list, ctrl_list, width_list, key_list = await get_button_data(
             config[CONF_ROWS], w
         )

@@ -312,14 +312,13 @@ CONFIG_SCHEMA = cv.All(
 def exp_mqtt_message(config):
     if config is None:
         return cg.optional(cg.TemplateArguments(MQTTMessage))
-    exp = cg.StructInitializer(
+    return cg.StructInitializer(
         MQTTMessage,
         ("topic", config[CONF_TOPIC]),
         ("payload", config.get(CONF_PAYLOAD, "")),
         ("qos", config[CONF_QOS]),
         ("retain", config[CONF_RETAIN]),
     )
-    return exp


 @coroutine_with_priority(40.0)

@@ -18,13 +18,12 @@ def one_wire_device_schema():

     :return: The 1-wire device schema, `extend` this in your config schema.
     """
-    schema = cv.Schema(
+    return cv.Schema(
         {
             cv.GenerateID(CONF_ONE_WIRE_ID): cv.use_id(OneWireBus),
             cv.Optional(CONF_ADDRESS): cv.hex_uint64_t,
         }
     )
-    return schema


 async def register_one_wire_device(var, config):

@@ -186,8 +186,7 @@ def _process_package(package_config, config):
         package_config = _process_base_package(package_config)
     if isinstance(package_config, dict):
         recursive_package = do_packages_pass(package_config)
-    config = merge_config(recursive_package, config)
-    return config
+    return merge_config(recursive_package, config)


 def do_packages_pass(config: dict):

@@ -114,8 +114,7 @@ PMWCS3_CALIBRATION_SCHEMA = cv.Schema(
 )
 async def pmwcs3_calibration_to_code(config, action_id, template_arg, args):
     parent = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, parent)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, parent)


 PMWCS3_NEW_I2C_ADDRESS_SCHEMA = cv.maybe_simple_value(

@@ -136,8 +136,7 @@ RFBRIDGE_ID_SCHEMA = cv.Schema({cv.GenerateID(): cv.use_id(RFBridgeComponent)})
 @automation.register_action("rf_bridge.learn", RFBridgeLearnAction, RFBRIDGE_ID_SCHEMA)
 async def rf_bridge_learnx_to_code(config, action_id, template_args, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_args, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_args, paren)


 @automation.register_action(
@@ -149,8 +148,7 @@ async def rf_bridge_start_advanced_sniffing_to_code(
     config, action_id, template_args, args
 ):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_args, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_args, paren)


 @automation.register_action(
@@ -162,8 +160,7 @@ async def rf_bridge_stop_advanced_sniffing_to_code(
     config, action_id, template_args, args
 ):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_args, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_args, paren)


 @automation.register_action(
@@ -175,8 +172,7 @@ async def rf_bridge_start_bucket_sniffing_to_code(
     config, action_id, template_args, args
 ):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_args, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_args, paren)


 RFBRIDGE_SEND_ADVANCED_CODE_SCHEMA = cv.Schema(

@@ -125,8 +125,7 @@ writezero:

 def time_to_cycles(time_us):
     cycles_per_us = 57.5
-    cycles = round(float(time_us) * cycles_per_us)
-    return cycles
+    return round(float(time_us) * cycles_per_us)


 CONF_PIO = "pio"
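The refactor above keeps the helper's arithmetic unchanged (57.5 PIO clock cycles per microsecond, rounded to the nearest whole cycle). A quick self-contained check of the returned values, for illustration only:

def time_to_cycles(time_us):
    # 57.5 cycles per microsecond, as in the helper above
    cycles_per_us = 57.5
    return round(float(time_us) * cycles_per_us)


print(time_to_cycles(10))   # 575
print(time_to_cycles(0.4))  # 23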
@@ -256,6 +256,7 @@ OffsetFilter = sensor_ns.class_("OffsetFilter", Filter)
 MultiplyFilter = sensor_ns.class_("MultiplyFilter", Filter)
 FilterOutValueFilter = sensor_ns.class_("FilterOutValueFilter", Filter)
 ThrottleFilter = sensor_ns.class_("ThrottleFilter", Filter)
+ThrottleWithPriorityFilter = sensor_ns.class_("ThrottleWithPriorityFilter", Filter)
 TimeoutFilter = sensor_ns.class_("TimeoutFilter", Filter, cg.Component)
 DebounceFilter = sensor_ns.class_("DebounceFilter", Filter, cg.Component)
 HeartbeatFilter = sensor_ns.class_("HeartbeatFilter", Filter, cg.Component)
@@ -595,6 +596,25 @@ async def throttle_filter_to_code(config, filter_id):
     return cg.new_Pvariable(filter_id, config)


+TIMEOUT_WITH_PRIORITY_SCHEMA = cv.maybe_simple_value(
+    {
+        cv.Required(CONF_TIMEOUT): cv.positive_time_period_milliseconds,
+        cv.Optional(CONF_VALUE, default="nan"): cv.ensure_list(cv.float_),
+    },
+    key=CONF_TIMEOUT,
+)
+
+
+@FILTER_REGISTRY.register(
+    "throttle_with_priority",
+    ThrottleWithPriorityFilter,
+    TIMEOUT_WITH_PRIORITY_SCHEMA,
+)
+async def throttle_with_priority_filter_to_code(config, filter_id):
+    template_ = [await cg.templatable(x, [], float) for x in config[CONF_VALUE]]
+    return cg.new_Pvariable(filter_id, config[CONF_TIMEOUT], template_)
+
+
 @FILTER_REGISTRY.register(
     "heartbeat", HeartbeatFilter, cv.positive_time_period_milliseconds
 )
@@ -1,5 +1,6 @@
 #include "filter.h"
+#include <cmath>
 #include "esphome/core/application.h"
 #include "esphome/core/hal.h"
 #include "esphome/core/log.h"
 #include "sensor.h"
@@ -332,6 +333,40 @@ optional<float> ThrottleFilter::new_value(float value) {
   return {};
 }

+// ThrottleWithPriorityFilter
+ThrottleWithPriorityFilter::ThrottleWithPriorityFilter(uint32_t min_time_between_inputs,
+                                                       std::vector<TemplatableValue<float>> prioritized_values)
+    : min_time_between_inputs_(min_time_between_inputs), prioritized_values_(std::move(prioritized_values)) {}
+
+optional<float> ThrottleWithPriorityFilter::new_value(float value) {
+  bool is_prioritized_value = false;
+  int8_t accuracy = this->parent_->get_accuracy_decimals();
+  float accuracy_mult = powf(10.0f, accuracy);
+  const uint32_t now = App.get_loop_component_start_time();
+  // First, determine if the new value is one of the prioritized values
+  for (auto prioritized_value : this->prioritized_values_) {
+    if (std::isnan(prioritized_value.value())) {
+      if (std::isnan(value)) {
+        is_prioritized_value = true;
+        break;
+      }
+      continue;
+    }
+    float rounded_prioritized_value = roundf(accuracy_mult * prioritized_value.value());
+    float rounded_value = roundf(accuracy_mult * value);
+    if (rounded_prioritized_value == rounded_value) {
+      is_prioritized_value = true;
+      break;
+    }
+  }
+  // Finally, determine if the new value should be throttled and pass it through if not
+  if (this->last_input_ == 0 || now - this->last_input_ >= min_time_between_inputs_ || is_prioritized_value) {
+    this->last_input_ = now;
+    return value;
+  }
+  return {};
+}
+
 // DeltaFilter
 DeltaFilter::DeltaFilter(float delta, bool percentage_mode)
     : delta_(delta), current_delta_(delta), percentage_mode_(percentage_mode), last_value_(NAN) {}

@@ -314,6 +314,20 @@ class ThrottleFilter : public Filter {
   uint32_t min_time_between_inputs_;
 };

+/// Same as 'throttle' but will immediately publish values contained in `value_to_prioritize`.
+class ThrottleWithPriorityFilter : public Filter {
+ public:
+  explicit ThrottleWithPriorityFilter(uint32_t min_time_between_inputs,
+                                      std::vector<TemplatableValue<float>> prioritized_values);
+
+  optional<float> new_value(float value) override;
+
+ protected:
+  uint32_t last_input_{0};
+  uint32_t min_time_between_inputs_;
+  std::vector<TemplatableValue<float>> prioritized_values_;
+};
+
 class TimeoutFilter : public Filter, public Component {
  public:
   explicit TimeoutFilter(uint32_t time_period, TemplatableValue<float> new_value);
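For readers skimming the new C++ filter, here is a rough Python model of its behaviour (an illustration only, not the implementation and not part of the commit): a value is normally throttled to one publish per time window, but any value that matches one of the configured priority values, compared after rounding to the sensor's accuracy and with NaN matching NaN, is published immediately.

import math
import time


def make_throttle_with_priority(min_period_s, prioritized, accuracy_decimals=1):
    """Return a new_value() callable modelling the filter's behaviour."""
    mult = 10.0 ** accuracy_decimals
    state = {"last": None}

    def matches(p, value):
        # A NaN priority entry matches only an incoming NaN.
        if math.isnan(p):
            return math.isnan(value)
        if math.isnan(value):
            return False
        return round(p * mult) == round(value * mult)

    def new_value(value):
        prioritized_hit = any(matches(p, value) for p in prioritized)
        now = time.monotonic()
        if state["last"] is None or now - state["last"] >= min_period_s or prioritized_hit:
            state["last"] = now
            return value  # publish
        return None  # throttled

    return new_value


# Example: throttle to one value per 5 s, but always let 42.0 and NaN through.
filt = make_throttle_with_priority(5.0, [42.0, float("nan")])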
@@ -171,8 +171,7 @@ async def sim800l_dial_to_code(config, action_id, template_arg, args):
 )
 async def sim800l_connect_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)


 SIM800L_SEND_USSD_SCHEMA = cv.Schema(
@@ -201,5 +200,4 @@ async def sim800l_send_ussd_to_code(config, action_id, template_arg, args):
 )
 async def sim800l_disconnect_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)

@@ -122,5 +122,4 @@ UFIRE_EC_RESET_SCHEMA = cv.Schema(
 )
 async def ufire_ec_reset_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)

@@ -123,5 +123,4 @@ UFIRE_ISE_RESET_SCHEMA = cv.Schema({cv.GenerateID(): cv.use_id(UFireISEComponent
 )
 async def ufire_ise_reset_to_code(config, action_id, template_arg, args):
     paren = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, paren)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, paren)

@@ -474,8 +474,20 @@ const char *get_disconnect_reason_str(uint8_t reason) {
       return "Handshake Failed";
     case WIFI_REASON_CONNECTION_FAIL:
       return "Connection Failed";
+    case WIFI_REASON_AP_TSF_RESET:
+      return "AP TSF reset";
     case WIFI_REASON_ROAMING:
       return "Station Roaming";
+    case WIFI_REASON_ASSOC_COMEBACK_TIME_TOO_LONG:
+      return "Association comeback time too long";
+    case WIFI_REASON_SA_QUERY_TIMEOUT:
+      return "SA query timeout";
+    case WIFI_REASON_NO_AP_FOUND_W_COMPATIBLE_SECURITY:
+      return "No AP found with compatible security";
+    case WIFI_REASON_NO_AP_FOUND_IN_AUTHMODE_THRESHOLD:
+      return "No AP found in auth mode threshold";
+    case WIFI_REASON_NO_AP_FOUND_IN_RSSI_THRESHOLD:
+      return "No AP found in RSSI threshold";
     case WIFI_REASON_UNSPECIFIED:
     default:
       return "Unspecified";

@@ -640,8 +640,20 @@ const char *get_disconnect_reason_str(uint8_t reason) {
       return "Handshake Failed";
     case WIFI_REASON_CONNECTION_FAIL:
       return "Connection Failed";
+    case WIFI_REASON_AP_TSF_RESET:
+      return "AP TSF reset";
     case WIFI_REASON_ROAMING:
       return "Station Roaming";
+    case WIFI_REASON_ASSOC_COMEBACK_TIME_TOO_LONG:
+      return "Association comeback time too long";
+    case WIFI_REASON_SA_QUERY_TIMEOUT:
+      return "SA query timeout";
+    case WIFI_REASON_NO_AP_FOUND_W_COMPATIBLE_SECURITY:
+      return "No AP found with compatible security";
+    case WIFI_REASON_NO_AP_FOUND_IN_AUTHMODE_THRESHOLD:
+      return "No AP found in auth mode threshold";
+    case WIFI_REASON_NO_AP_FOUND_IN_RSSI_THRESHOLD:
+      return "No AP found in RSSI threshold";
     case WIFI_REASON_UNSPECIFIED:
     default:
       return "Unspecified";
@@ -111,8 +111,7 @@ def merge_config(full_old, full_new):
             else:
                 ids[new_id] = len(res)
                 res.append(v)
-        res = [v for i, v in enumerate(res) if i not in ids_to_delete]
-        return res
+        return [v for i, v in enumerate(res) if i not in ids_to_delete]
     if new is None:
         return old


@@ -1868,7 +1868,7 @@ def validate_registry_entry(name, registry):

 def none(value):
     if value in ("none", "None"):
-        return None
+        return
     raise Invalid("Must be none")



@@ -115,7 +115,7 @@ async def build_registry_list(registry, config):

 async def past_safe_mode():
     if CONF_SAFE_MODE not in CORE.config:
-        return
+        return None

     def _safe_mode_generator():
         while True:

@@ -36,7 +36,7 @@ _LOGGER = logging.getLogger(__name__)


 def config_from_env():
-    config = {
+    return {
         CONF_MQTT: {
            CONF_USERNAME: get_str_env("ESPHOME_DASHBOARD_MQTT_USERNAME"),
            CONF_PASSWORD: get_str_env("ESPHOME_DASHBOARD_MQTT_PASSWORD"),
@@ -44,7 +44,6 @@ def config_from_env():
            CONF_PORT: get_int_env("ESPHOME_DASHBOARD_MQTT_PORT", 1883),
         },
     }
-    return config


 def initialize(

@@ -81,8 +81,7 @@ def _print_file_read_event(path: str) -> None:

 def _request_and_get_stream_on_stdin(fname: str) -> StringIO:
     _print_file_read_event(fname)
-    raw_yaml_stream = StringIO(_read_file_content_from_json_on_stdin())
-    return raw_yaml_stream
+    return StringIO(_read_file_content_from_json_on_stdin())


 def _vscode_loader(fname: str) -> dict[str, Any]:

@@ -305,8 +305,7 @@ class ESPHomeLoaderMixin:
         result = self.yaml_loader(self._rel_path(file))
         if not vars:
             vars = {}
-        result = substitute_vars(result, vars)
-        return result
+        return substitute_vars(result, vars)

     @_add_data_ref
     def construct_include_dir_list(self, node: yaml.Node) -> list[dict[str, Any]]:

@@ -118,6 +118,7 @@ select = [
     "PERF", # performance
     "PL", # pylint
     "SIM", # flake8-simplify
+    "RET", # flake8-ret
     "UP", # pyupgrade
 ]

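Most of the Python churn in this commit is mechanical fallout from the flake8-return (RET) rules enabled here: RET504 forbids assigning to a variable only to return it on the next line, RET505 forbids an else branch after a return, and RET501/RET502 keep bare return versus return None consistent with a function's other returns. A minimal before/after sketch with made-up helpers, not code from the repository:

def join_keys_before(config: dict) -> str:
    # RET504: assignment immediately followed by return
    result = ", ".join(sorted(config))
    return result


def join_keys_after(config: dict) -> str:
    return ", ".join(sorted(config))


def pick_before(value):
    # RET505: unnecessary `else` after `return`
    if value is None:
        return 0
    else:
        return value


def pick_after(value):
    if value is None:
        return 0
    return value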
@@ -539,8 +539,7 @@ class BoolType(TypeInfo):
     wire_type = WireType.VARINT # Uses wire type 0

     def dump(self, name: str) -> str:
-        o = f"out.append(YESNO({name}));"
-        return o
+        return f"out.append(YESNO({name}));"

     def get_size_calculation(self, name: str, force: bool = False) -> str:
         return self._get_simple_size_calculation(name, force, "add_bool")
@@ -592,9 +591,8 @@ class StringType(TypeInfo):
         if no_zero_copy:
             # Use the std::string directly
             return f"buffer.encode_string({self.number}, this->{self.field_name});"
-        else:
-            # Use the StringRef
-            return f"buffer.encode_string({self.number}, this->{self.field_name}_ref_);"
+        # Use the StringRef
+        return f"buffer.encode_string({self.number}, this->{self.field_name}_ref_);"

     def dump(self, name):
         # Check if no_zero_copy option is set
@@ -716,8 +714,7 @@ class MessageType(TypeInfo):
         return f"case {self.number}: value.decode_to_message(this->{self.field_name}); break;"

     def dump(self, name: str) -> str:
-        o = f"{name}.dump_to(out);"
-        return o
+        return f"{name}.dump_to(out);"

     @property
     def dump_content(self) -> str:
@@ -865,8 +862,7 @@ class FixedArrayBytesType(TypeInfo):
         return f"buffer.encode_bytes({self.number}, this->{self.field_name}, this->{self.field_name}_len);"

     def dump(self, name: str) -> str:
-        o = f"out.append(format_hex_pretty({name}, {name}_len));"
-        return o
+        return f"out.append(format_hex_pretty({name}, {name}_len));"

     @property
     def dump_content(self) -> str:
@@ -883,9 +879,8 @@ class FixedArrayBytesType(TypeInfo):
         if force:
             # For repeated fields, always calculate size (no zero check)
             return f"size.add_length_force({field_id_size}, {length_field});"
-        else:
-            # For non-repeated fields, add_length already checks for zero
-            return f"size.add_length({field_id_size}, {length_field});"
+        # For non-repeated fields, add_length already checks for zero
+        return f"size.add_length({field_id_size}, {length_field});"

     def get_estimated_size(self) -> int:
         # Estimate based on typical BLE advertisement size
@@ -940,8 +935,7 @@ class EnumType(TypeInfo):
         return f"buffer.{self.encode_func}({self.number}, static_cast<uint32_t>(this->{self.field_name}));"

     def dump(self, name: str) -> str:
-        o = f"out.append(proto_enum_to_string<{self.cpp_type}>({name}));"
-        return o
+        return f"out.append(proto_enum_to_string<{self.cpp_type}>({name}));"

     def dump_field_value(self, value: str) -> str:
         # Enums need explicit cast for the template
@@ -1110,13 +1104,12 @@ class FixedArrayRepeatedType(TypeInfo):
         def encode_element(element: str) -> str:
             if isinstance(self._ti, EnumType):
                 return f"buffer.{self._ti.encode_func}({self.number}, static_cast<uint32_t>({element}), true);"
-            else:
-                return f"buffer.{self._ti.encode_func}({self.number}, {element}, true);"
+            return f"buffer.{self._ti.encode_func}({self.number}, {element}, true);"

         # Unroll small arrays for efficiency
         if self.array_size == 1:
             return encode_element(f"this->{self.field_name}[0]")
-        elif self.array_size == 2:
+        if self.array_size == 2:
             return (
                 encode_element(f"this->{self.field_name}[0]")
                 + "\n "
@@ -1195,8 +1188,7 @@ class RepeatedTypeInfo(TypeInfo):
             # use it as-is, otherwise append the element type
             if "<" in self._container_type and ">" in self._container_type:
                 return f"const {self._container_type}*"
-            else:
-                return f"const {self._container_type}<{self._ti.cpp_type}>*"
+            return f"const {self._container_type}<{self._ti.cpp_type}>*"
         return f"std::vector<{self._ti.cpp_type}>"

     @property
@@ -1282,14 +1274,13 @@ class RepeatedTypeInfo(TypeInfo):
             o += f" buffer.{self._ti.encode_func}({self.number}, it, true);\n"
             o += "}"
             return o
-            o = f"for (auto {'' if self._ti_is_bool else '&'}it : this->{self.field_name}) {{\n"
-            if isinstance(self._ti, EnumType):
-                o += f" buffer.{self._ti.encode_func}({self.number}, static_cast<uint32_t>(it), true);\n"
-            else:
-                o += f" buffer.{self._ti.encode_func}({self.number}, it, true);\n"
-            o += "}"
-            return o
+        o = f"for (auto {'' if self._ti_is_bool else '&'}it : this->{self.field_name}) {{\n"
+        if isinstance(self._ti, EnumType):
+            o += f" buffer.{self._ti.encode_func}({self.number}, static_cast<uint32_t>(it), true);\n"
+        else:
+            o += f" buffer.{self._ti.encode_func}({self.number}, it, true);\n"
+        o += "}"
         return o

     @property
     def dump_content(self) -> str:
@@ -444,8 +444,7 @@ def get_str_path_schema(strPath):
     if len(parts) > 2:
         parts[0] += "." + parts[1]
         parts[1] = parts[2]
-    s1 = output.get(parts[0], {}).get(S_SCHEMAS, {}).get(parts[1], {})
-    return s1
+    return output.get(parts[0], {}).get(S_SCHEMAS, {}).get(parts[1], {})


 def pop_str_path_schema(strPath):

@@ -42,12 +42,11 @@ CONFIG_NEWLIB_LIBC=y

     def extract_defines(command):
         define_pattern = re.compile(r"-D\s*([^\s]+)")
-        defines = [
+        return [
             match
             for match in define_pattern.findall(command)
             if match not in ("_ASMLANGUAGE")
         ]
-        return defines

     def find_cxx_path(commands):
         for entry in commands:
@@ -56,6 +55,7 @@ CONFIG_NEWLIB_LIBC=y
             if not cxx_path.endswith("++"):
                 continue
             return cxx_path
+        return None

     def get_builtin_include_paths(compiler):
         result = subprocess.run(
@@ -83,11 +83,10 @@ CONFIG_NEWLIB_LIBC=y
         flag_pattern = re.compile(
             r"(-O[0-3s]|-g|-std=[^\s]+|-Wall|-Wextra|-Werror|--[^\s]+|-f[^\s]+|-m[^\s]+|-imacros\s*[^\s]+)"
         )
-        flags = [
+        return [
             match.replace("-imacros ", "-imacros")
             for match in flag_pattern.findall(command)
         ]
-        return flags

     def transform_to_idedata_format(compile_commands):
         cxx_path = find_cxx_path(compile_commands)
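As a standalone illustration of what the two regular expressions above pull out of a compiler command line (the command string here is made up for the example):

import re

command = "g++ -O2 -std=gnu++17 -DUSE_ZEPHYR -D CONFIG_FOO=1 -imacros autoconf.h main.cpp"

define_pattern = re.compile(r"-D\s*([^\s]+)")
print(define_pattern.findall(command))
# ['USE_ZEPHYR', 'CONFIG_FOO=1']

flag_pattern = re.compile(
    r"(-O[0-3s]|-g|-std=[^\s]+|-Wall|-Wextra|-Werror|--[^\s]+|-f[^\s]+|-m[^\s]+|-imacros\s*[^\s]+)"
)
print([m.replace("-imacros ", "-imacros") for m in flag_pattern.findall(command)])
# ['-O2', '-std=gnu++17', '-imacrosautoconf.h']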
@@ -1,44 +1,3 @@
-sensor:
-  - platform: template
-    name: "Template Sensor"
-    id: template_sens
-    lambda: |-
-      if (id(some_binary_sensor).state) {
-        return 42.0;
-      } else {
-        return 0.0;
-      }
-    update_interval: 60s
-    filters:
-      - offset: 10
-      - multiply: 1
-      - offset: !lambda return 10;
-      - multiply: !lambda return 2;
-      - filter_out:
-          - 10
-          - 20
-          - !lambda return 10;
-      - filter_out: 10
-      - filter_out: !lambda return NAN;
-      - timeout:
-          timeout: 10s
-          value: !lambda return 10;
-      - timeout:
-          timeout: 1h
-          value: 20.0
-      - timeout:
-          timeout: 1d
-      - to_ntc_resistance:
-          calibration:
-            - 10.0kOhm -> 25°C
-            - 27.219kOhm -> 0°C
-            - 14.674kOhm -> 15°C
-      - to_ntc_temperature:
-          calibration:
-            - 10.0kOhm -> 25°C
-            - 27.219kOhm -> 0°C
-            - 14.674kOhm -> 15°C
-
 esphome:
   on_boot:
     - sensor.template.publish:
@@ -82,6 +41,123 @@ binary_sensor:
       sensor.in_range:
         id: template_sens
         below: 30.0
+    filters:
+      - invert:
+      - delayed_on: 100ms
+      - delayed_off: 100ms
+      - delayed_on_off: !lambda "if (id(test_switch).state) return 1000; else return 0;"
+      - delayed_on_off:
+          time_on: 10s
+          time_off: !lambda "if (id(test_switch).state) return 1000; else return 0;"
+      - autorepeat:
+          - delay: 1s
+            time_off: 100ms
+            time_on: 900ms
+          - delay: 5s
+            time_off: 100ms
+            time_on: 400ms
+      - lambda: |-
+          if (id(other_binary_sensor).state) {
+            return x;
+          } else {
+            return {};
+          }
+      - settle: 500ms
+      - timeout: 5s
+
+sensor:
+  - platform: template
+    name: "Template Sensor"
+    id: template_sens
+    lambda: |-
+      if (id(some_binary_sensor).state) {
+        return 42.0;
+      } else {
+        return 0.0;
+      }
+    update_interval: 60s
+    filters:
+      - calibrate_linear:
+          - 0.0 -> 0.0
+          - 40.0 -> 45.0
+          - 100.0 -> 102.5
+      - calibrate_polynomial:
+          degree: 2
+          datapoints:
+            # Map 0.0 (from sensor) to 0.0 (true value)
+            - 0.0 -> 0.0
+            - 10.0 -> 12.1
+            - 13.0 -> 14.0
+      - clamp:
+          max_value: 10.0
+          min_value: -10.0
+      - debounce: 0.1s
+      - delta: 5.0
+      - exponential_moving_average:
+          alpha: 0.1
+          send_every: 15
+      - filter_out:
+          - 10
+          - 20
+          - !lambda return 10;
+      - filter_out: 10
+      - filter_out: !lambda return NAN;
+      - heartbeat: 5s
+      - lambda: return x * (9.0/5.0) + 32.0;
+      - max:
+          window_size: 10
+          send_every: 2
+          send_first_at: 1
+      - median:
+          window_size: 7
+          send_every: 4
+          send_first_at: 3
+      - min:
+          window_size: 10
+          send_every: 2
+          send_first_at: 1
+      - multiply: 1
+      - multiply: !lambda return 2;
+      - offset: 10
+      - offset: !lambda return 10;
+      - or:
+      - quantile:
+          window_size: 7
+          send_every: 4
+          send_first_at: 3
+          quantile: .9
+      - round: 1
+      - round_to_multiple_of: 0.25
+      - skip_initial: 3
+      - sliding_window_moving_average:
+          window_size: 15
+          send_every: 15
+      - throttle: 1s
+      - throttle_average: 2s
+      - throttle_with_priority: 5s
+      - throttle_with_priority:
+          timeout: 3s
+          value:
+            - 42.0
+            - nan
+      - timeout:
+          timeout: 10s
+          value: !lambda return 10;
+      - timeout:
+          timeout: 1h
+          value: 20.0
+      - timeout:
+          timeout: 1d
+      - to_ntc_resistance:
+          calibration:
+            - 10.0kOhm -> 25°C
+            - 27.219kOhm -> 0°C
+            - 14.674kOhm -> 15°C
+      - to_ntc_temperature:
+          calibration:
+            - 10.0kOhm -> 25°C
+            - 27.219kOhm -> 0°C
+            - 14.674kOhm -> 15°C

 output:
   - platform: template
@@ -92,6 +168,7 @@ output:

 switch:
   - platform: template
+    id: test_switch
     name: "Template Switch"
     lambda: |-
       if (id(some_binary_sensor).state) {
@@ -31,8 +31,7 @@ class DashboardTestHelper:
         else:
             url = f"http://127.0.0.1:{self.port}{path}"
         future = self.client.fetch(url, raise_error=True, **kwargs)
-        result = await future
-        return result
+        return await future


 @pytest_asyncio.fixture()

@@ -251,19 +251,18 @@ async def compile_esphome(
         if proc.returncode == 0:
             # Success!
             break
-        elif proc.returncode == -11 and attempt < max_retries - 1:
+        if proc.returncode == -11 and attempt < max_retries - 1:
            # Segfault (-11 = SIGSEGV), retry
            print(
                f"Compilation segfaulted (attempt {attempt + 1}/{max_retries}), retrying..."
            )
            await asyncio.sleep(1) # Brief pause before retry
            continue
-        else:
-            # Other error or final retry
-            raise RuntimeError(
-                f"Failed to compile {config_path}, return code: {proc.returncode}. "
-                f"Run with 'pytest -s' to see compilation output."
-            )
+        # Other error or final retry
+        raise RuntimeError(
+            f"Failed to compile {config_path}, return code: {proc.returncode}. "
+            f"Run with 'pytest -s' to see compilation output."
+        )

     # Load the config to get idedata (blocking call, must use executor)
     loop = asyncio.get_running_loop()

@@ -72,8 +72,7 @@ DisableAction = loop_test_component_ns.class_("DisableAction", automation.Action
 )
 async def enable_to_code(config, action_id, template_arg, args):
     parent = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, parent)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, parent)


 @automation.register_action(
@@ -87,8 +86,7 @@ async def enable_to_code(config, action_id, template_arg, args):
 )
 async def disable_to_code(config, action_id, template_arg, args):
     parent = await cg.get_variable(config[CONF_ID])
-    var = cg.new_Pvariable(action_id, template_arg, parent)
-    return var
+    return cg.new_Pvariable(action_id, template_arg, parent)


 async def to_code(config):

@@ -69,7 +69,7 @@ def test_calculate_clang_tidy_hash() -> None:
     def read_file_mock(path: Path) -> bytes:
         if ".clang-tidy" in str(path):
             return clang_tidy_content
-        elif "platformio.ini" in str(path):
+        if "platformio.ini" in str(path):
             return platformio_content
         return b""


@@ -315,9 +315,8 @@ def test_local_development_no_remotes_configured(monkeypatch: MonkeyPatch) -> No
     def side_effect_func(*args):
         if args == ("git", "remote"):
             return "origin\nupstream\n"
-        else:
-            # All merge-base attempts fail
-            raise Exception("Command failed")
+        # All merge-base attempts fail
+        raise Exception("Command failed")

     mock_output.side_effect = side_effect_func


@@ -18,11 +18,10 @@ def sort_dicts(obj):
     """Recursively sort dictionaries for order-insensitive comparison."""
     if isinstance(obj, dict):
         return {k: sort_dicts(obj[k]) for k in sorted(obj)}
-    elif isinstance(obj, list):
+    if isinstance(obj, list):
         # Lists are not sorted; we preserve order
         return [sort_dicts(i) for i in obj]
-    else:
-        return obj
+    return obj


 def dict_diff(a, b, path=""):
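Putting the cleaned-up helper together with a tiny made-up input shows the order-insensitive comparison it enables: dictionary keys are sorted recursively while list order is preserved.

def sort_dicts(obj):
    """Recursively sort dictionaries for order-insensitive comparison."""
    if isinstance(obj, dict):
        return {k: sort_dicts(obj[k]) for k in sorted(obj)}
    if isinstance(obj, list):
        # Lists are not sorted; we preserve order
        return [sort_dicts(i) for i in obj]
    return obj


print(sort_dicts({"b": 1, "a": [{"d": 2, "c": 3}]}))
# {'a': [{'c': 3, 'd': 2}], 'b': 1}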
@@ -22,8 +22,7 @@ def _run_repl_test(input_data):
         call[0][0] for call in mock_stdout.write.call_args_list
     ).strip()
     splitted_output = full_output.split("\n")
-    remove_version = splitted_output[1:] # remove first entry with version info
-    return remove_version
+    return splitted_output[1:] # remove first entry with version info


 def _validate(file_path: str):