Mirror of https://github.com/esphome/esphome.git (synced 2025-08-10 20:29:24 +00:00)
Compare commits
18 Commits
Author | SHA1 | Date
---|---|---
 | 925e3cb6c9 |
 | 6757acba56 |
 | 5cc91cdd95 |
 | 2b41886819 |
 | 72c6efd6a0 |
 | a1f1804112 |
 | a8b1ceb4e9 |
 | 4fb0f7f8c6 |
 | 958cadeca8 |
 | 00f2655f1a |
 | 074f5029eb |
 | 1691976587 |
 | 60e6b4d21e |
 | 5750591df2 |
 | a75da54455 |
 | de7f6c3f5f |
 | 4245480656 |
 | 1824c8131e |
.github/workflows/release.yml (vendored, 88 changed lines)
@@ -30,6 +30,10 @@ jobs:
            TAG=$(cat esphome/const.py | sed -n -E "s/^__version__\s+=\s+\"(.+)\"$/\1/p")
            today="$(date --utc '+%Y%m%d')"
            TAG="${TAG}${today}"
            BRANCH=${GITHUB_REF#refs/heads/}
            if [[ "$BRANCH" != "dev" ]]; then
              TAG="${TAG}-${BRANCH}"
            fi
          fi
          echo "tag=${TAG}" >> $GITHUB_OUTPUT
          # yamllint enable rule:line-length
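
The added lines appear to derive a date-stamped tag when no release tag applies (the surrounding condition sits outside this hunk): the version string is read from esphome/const.py, the current UTC date is appended, and a non-dev branch name is suffixed. A minimal Python sketch of the same derivation, assuming hypothetical version and branch values that are not taken from the repository:

import re
from datetime import datetime, timezone

def dev_tag(const_py_text: str, branch: str) -> str:
    # Same extraction as the workflow's sed expression: pull the quoted
    # value assigned to __version__ in esphome/const.py.
    version = re.search(r'^__version__\s+=\s+"(.+)"$', const_py_text, re.M).group(1)
    tag = version + datetime.now(timezone.utc).strftime("%Y%m%d")
    if branch != "dev":
        tag += f"-{branch}"
    return tag

# Hypothetical inputs, for illustration only:
print(dev_tag('__version__ = "2023.1.0-dev"\n', "dev"))     # e.g. 2023.1.0-dev20230105
print(dev_tag('__version__ = "2023.1.0-dev"\n', "bugfix"))  # e.g. 2023.1.0-dev20230105-bugfix
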
@@ -57,7 +61,7 @@ jobs:
        run: twine upload dist/*

  deploy-docker:
    name: Build and publish docker containers
    name: Build and publish ESPHome ${{ matrix.image.title}}
    if: github.repository == 'esphome/esphome'
    permissions:
      contents: read
@@ -66,8 +70,19 @@ jobs:
    needs: [init]
    strategy:
      matrix:
        arch: [amd64, armv7, aarch64]
        build_type: ["ha-addon", "docker", "lint"]
        image:
          - title: "ha-addon"
            suffix: "hassio"
            target: "hassio"
            baseimg: "hassio"
          - title: "docker"
            suffix: ""
            target: "docker"
            baseimg: "docker"
          - title: "lint"
            suffix: "lint"
            target: "lint"
            baseimg: "docker"
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
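
The matrix moves from a flat build_type list to a list of image objects carrying a tag suffix, Dockerfile target, and base-image type, so each architecture is paired with each image definition. A rough Python sketch of the cross product GitHub Actions would expand this matrix into; the matrix values are copied from the hunk above, while the printed job label is only illustrative:

from itertools import product

archs = ["amd64", "armv7", "aarch64"]
images = [
    {"title": "ha-addon", "suffix": "hassio", "target": "hassio", "baseimg": "hassio"},
    {"title": "docker", "suffix": "", "target": "docker", "baseimg": "docker"},
    {"title": "lint", "suffix": "lint", "target": "lint", "baseimg": "docker"},
]

# GitHub Actions expands a matrix as the cross product of its axes,
# so this mirrors the deploy-docker job combinations that would be spawned.
for arch, image in product(archs, images):
    print(f"deploy-docker ({image['title']}, {arch}) -> target={image['target']}")
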
@@ -92,54 +107,29 @@ jobs:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate short tags
        id: tags
        run: |
          docker/generate_tags.py \
            --tag "${{ needs.init.outputs.tag }}" \
            --suffix "${{ matrix.image.suffix }}"

      - name: Build and push
        run: |
          docker/build.py \
            --tag "${{ needs.init.outputs.tag }}" \
            --arch "${{ matrix.arch }}" \
            --build-type "${{ matrix.build_type }}" \
            build \
            --push

  deploy-docker-manifest:
    if: github.repository == 'esphome/esphome'
    permissions:
      contents: read
      packages: write
    runs-on: ubuntu-latest
    needs: [init, deploy-docker]
    strategy:
      matrix:
        build_type: ["ha-addon", "docker", "lint"]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: actions/setup-python@v4
        uses: docker/build-push-action@v3
        with:
          python-version: "3.9"
      - name: Enable experimental manifest support
        run: |
          mkdir -p ~/.docker
          echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json

      - name: Log in to docker hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Log in to the GitHub container registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Run manifest
        run: |
          docker/build.py \
            --tag "${{ needs.init.outputs.tag }}" \
            --build-type "${{ matrix.build_type }}" \
            manifest
          context: .
          file: ./docker/Dockerfile
          platforms: linux/amd64,linux/arm/v7,linux/arm64
          target: ${{ matrix.image.target }}
          push: true
          # yamllint disable rule:line-length
          cache-from: type=registry,ref=ghcr.io/${{ steps.tags.outputs.image }}:cache-${{ steps.tags.outputs.channel }}
          cache-to: type=registry,ref=ghcr.io/${{ steps.tags.outputs.image }}:cache-${{ steps.tags.outputs.channel }},mode=max
          # yamllint enable rule:line-length
          tags: ${{ steps.tags.outputs.tags }}
          build-args: |
            BASEIMGTYPE=${{ matrix.image.baseimg }}
            BUILD_VERSION=${{ needs.init.outputs.tag }}

  deploy-ha-addon-repo:
    if: github.repository == 'esphome/esphome' && github.event_name == 'release'
docker/Dockerfile

@@ -139,7 +139,7 @@ RUN \
        clang-tidy-11=1:11.0.1-2 \
        patch=2.7.6-7 \
        software-properties-common=0.96.20.2-2.1 \
        nano=5.4-2+deb11u1 \
        nano=5.4-2+deb11u2 \
        build-essential=12.9 \
        python3-dev=3.9.2-3 \
    && rm -rf \
docker/generate_tags.py (new executable file, 68 lines)
@@ -0,0 +1,68 @@
#!/usr/bin/env python3
import re
import os
import argparse
import json

CHANNEL_DEV = "dev"
CHANNEL_BETA = "beta"
CHANNEL_RELEASE = "release"

parser = argparse.ArgumentParser()
parser.add_argument(
    "--tag",
    type=str,
    required=True,
    help="The main docker tag to push to. If a version number also adds latest and/or beta tag",
)
parser.add_argument(
    "--suffix",
    type=str,
    required=True,
    help="The suffix of the tag.",
)


def main():
    args = parser.parse_args()

    # detect channel from tag
    match = re.match(r"^(\d+\.\d+)(?:\.\d+)?(b\d+)?$", args.tag)
    major_minor_version = None
    if match is None:
        channel = CHANNEL_DEV
    elif match.group(2) is None:
        major_minor_version = match.group(1)
        channel = CHANNEL_RELEASE
    else:
        channel = CHANNEL_BETA

    tags_to_push = [args.tag]
    if channel == CHANNEL_DEV:
        tags_to_push.append("dev")
    elif channel == CHANNEL_BETA:
        tags_to_push.append("beta")
    elif channel == CHANNEL_RELEASE:
        # Additionally push to beta
        tags_to_push.append("beta")
        tags_to_push.append("latest")

    if major_minor_version:
        tags_to_push.append("stable")
        tags_to_push.append(major_minor_version)

    suffix = f"-{args.suffix}" if args.suffix else ""

    with open(os.environ["GITHUB_OUTPUT"], "w") as f:
        print(f"channel={channel}", file=f)
        print(f"image=esphome/esphome{suffix}", file=f)
        full_tags = []

        for tag in tags_to_push:
            full_tags += [f"ghcr.io/esphome/esphome{suffix}:{tag}"]
            full_tags += [f"esphome/esphome{suffix}:{tag}"]
        print(f"tags={','.join(full_tags)}", file=f)


if __name__ == "__main__":
    main()
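
The script classifies the tag passed by the workflow into a release channel with the regex above, then writes channel, image, and a comma-separated tag list to GITHUB_OUTPUT for the build-push step. A small sketch of just the channel detection on sample tags; the sample tag strings are invented, only the regex and branching mirror the script:

import re

def detect_channel(tag: str) -> str:
    # Same pattern as docker/generate_tags.py: "X.Y" or "X.Y.Z" is a release,
    # a trailing "bN" marks a beta, anything else falls back to dev.
    match = re.match(r"^(\d+\.\d+)(?:\.\d+)?(b\d+)?$", tag)
    if match is None:
        return "dev"
    if match.group(2) is None:
        return "release"
    return "beta"

for sample in ["2022.12.6", "2023.2.0b1", "2023.2.0-dev20230105"]:
    print(sample, "->", detect_channel(sample))
# 2022.12.6 -> release, 2023.2.0b1 -> beta, 2023.2.0-dev20230105 -> dev
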
@@ -4,7 +4,7 @@ import requests
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components.packages import validate_source_shorthand
from esphome.const import CONF_WIFI
from esphome.const import CONF_WIFI, CONF_REF
from esphome.wizard import wizard_file
from esphome.yaml_util import dump
from esphome import git

@@ -21,19 +21,32 @@ CODEOWNERS = ["@esphome/core"]
def validate_import_url(value):
    value = cv.string_strict(value)
    value = cv.Length(max=255)(value)
    # ignore result, only check if it's a valid shorthand
    validate_source_shorthand(value)
    return value


def validate_full_url(config):
    if not config[CONF_IMPORT_FULL_CONFIG]:
        return config
    source = validate_source_shorthand(config[CONF_PACKAGE_IMPORT_URL])
    if CONF_REF not in source:
        raise cv.Invalid(
            "Must specify a ref (branch or tag) to import from when importing full config"
        )
    return config


CONF_PACKAGE_IMPORT_URL = "package_import_url"
CONF_IMPORT_FULL_CONFIG = "import_full_config"

CONFIG_SCHEMA = cv.Schema(
    {
        cv.Required(CONF_PACKAGE_IMPORT_URL): validate_import_url,
        cv.Optional(CONF_IMPORT_FULL_CONFIG, default=False): cv.boolean,
    }
CONFIG_SCHEMA = cv.All(
    cv.Schema(
        {
            cv.Required(CONF_PACKAGE_IMPORT_URL): validate_import_url,
            cv.Optional(CONF_IMPORT_FULL_CONFIG, default=False): cv.boolean,
        }
    ),
    validate_full_url,
)

WIFI_CONFIG = """

@@ -49,7 +62,7 @@ async def to_code(config):
    url = config[CONF_PACKAGE_IMPORT_URL]
    if config[CONF_IMPORT_FULL_CONFIG]:
        url += "?full_config"
    cg.add(dashboard_import_ns.set_package_import_url(config[CONF_PACKAGE_IMPORT_URL]))
    cg.add(dashboard_import_ns.set_package_import_url(url))


def import_config(
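
The new validate_full_url validator only applies when import_full_config is enabled: it re-parses the shorthand URL and rejects the config unless the parsed source carries a ref. A standalone sketch of that check, assuming a dict-shaped source and made-up URLs that are not the actual validate_source_shorthand output format:

def check_full_import(source: dict, import_full_config: bool) -> None:
    # Mirrors the added validator: importing a full config requires a pinned
    # branch or tag ("ref") so the dashboard knows exactly what to fetch.
    if import_full_config and "ref" not in source:
        raise ValueError(
            "Must specify a ref (branch or tag) to import from when importing full config"
        )

check_full_import({"url": "github://example/config/device.yaml", "ref": "main"}, True)  # ok
check_full_import({"url": "github://example/config/device.yaml"}, False)                # ok, not a full import
# check_full_import({"url": "github://example/config/device.yaml"}, True)  # would raise ValueError
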
@@ -95,7 +95,7 @@ void ESP32InternalGPIOPin::pin_mode(gpio::Flags flags) {
  // can't call gpio_config here because that logs in esp-idf which may cause issues
  gpio_set_direction(pin_, flags_to_mode(flags));
  gpio_pull_mode_t pull_mode = GPIO_FLOATING;
  if (flags & (gpio::FLAG_PULLUP | gpio::FLAG_PULLDOWN)) {
  if ((flags & gpio::FLAG_PULLUP) && (flags & gpio::FLAG_PULLDOWN)) {
    pull_mode = GPIO_PULLUP_PULLDOWN;
  } else if (flags & gpio::FLAG_PULLUP) {
    pull_mode = GPIO_PULLUP_ONLY;

@@ -128,7 +128,7 @@ void IRAM_ATTR ISRInternalGPIOPin::pin_mode(gpio::Flags flags) {
  auto *arg = reinterpret_cast<ISRPinArg *>(arg_);
  gpio_set_direction(arg->pin, flags_to_mode(flags));
  gpio_pull_mode_t pull_mode = GPIO_FLOATING;
  if (flags & (gpio::FLAG_PULLUP | gpio::FLAG_PULLDOWN)) {
  if ((flags & gpio::FLAG_PULLUP) && (flags & gpio::FLAG_PULLDOWN)) {
    pull_mode = GPIO_PULLUP_PULLDOWN;
  } else if (flags & gpio::FLAG_PULLUP) {
    pull_mode = GPIO_PULLUP_ONLY;
@@ -132,16 +132,16 @@ bool BLEClientBase::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_
        this->set_state(espbt::ClientState::IDLE);
        break;
      }
      if (this->connection_type_ == espbt::ConnectionType::V3_WITH_CACHE) {
        this->set_state(espbt::ClientState::CONNECTED);
        this->state_ = espbt::ClientState::ESTABLISHED;
        break;
      }
      auto ret = esp_ble_gattc_send_mtu_req(this->gattc_if_, param->open.conn_id);
      if (ret) {
        ESP_LOGW(TAG, "[%d] [%s] esp_ble_gattc_send_mtu_req failed, status=%x", this->connection_index_,
                 this->address_str_.c_str(), ret);
      }
      if (this->connection_type_ == espbt::ConnectionType::V3_WITH_CACHE) {
        this->set_state(espbt::ClientState::CONNECTED);
        this->state_ = espbt::ClientState::ESTABLISHED;
        break;
      }
      esp_ble_gattc_search_service(esp_gattc_if, param->cfg_mtu.conn_id, nullptr);
      break;
    }
@@ -706,11 +706,7 @@ void ESPBTDevice::parse_adv_(const esp_ble_gap_cb_param_t::ble_scan_result_evt_p
  while (offset + 2 < len) {
    const uint8_t field_length = payload[offset++];  // First byte is length of adv record
    if (field_length == 0) {
      if (offset < param.adv_data_len && param.scan_rsp_len > 0) {  // Zero padded advertisement data
        offset = param.adv_data_len;
        continue;
      }
      break;
      continue;  // Possible zero padded advertisement data
    }

    // first byte of adv record is adv record type
esphome/const.py

@@ -1,6 +1,6 @@
"""Constants used by esphome."""

__version__ = "2022.12.1"
__version__ = "2022.12.6"

ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
@@ -129,9 +129,9 @@ class GitFile:
    def raw_url(self) -> str:
        if self.ref is None:
            raise ValueError("URL has no ref")
        if self.domain == "github":
        if self.domain == "github.com":
            return f"https://raw.githubusercontent.com/{self.owner}/{self.repo}/{self.ref}/{self.filename}"
        if self.domain == "gitlab":
        if self.domain == "gitlab.com":
            return f"https://gitlab.com/{self.owner}/{self.repo}/-/raw/{self.ref}/{self.filename}"
        raise NotImplementedError(f"Git domain {self.domain} not supported")
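
This hunk changes raw_url to compare against the full host names github.com and gitlab.com rather than the bare github and gitlab strings, so parsed URLs resolve to the raw-content endpoints. A hedged sketch of the resulting mapping; the standalone helper and the sample owner, repo, and ref values are illustrative and not part of GitFile:

def raw_url_for(domain: str, owner: str, repo: str, ref: str, filename: str) -> str:
    # Same host-to-raw-URL mapping as GitFile.raw_url after the fix.
    if domain == "github.com":
        return f"https://raw.githubusercontent.com/{owner}/{repo}/{ref}/{filename}"
    if domain == "gitlab.com":
        return f"https://gitlab.com/{owner}/{repo}/-/raw/{ref}/{filename}"
    raise NotImplementedError(f"Git domain {domain} not supported")

# Hypothetical example values:
print(raw_url_for("github.com", "example-user", "example-configs", "main", "device.yaml"))
# -> https://raw.githubusercontent.com/example-user/example-configs/main/device.yaml
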