Mirror of https://github.com/esphome/esphome.git, synced 2024-11-21 11:37:27 +01:00
Bump python min to 3.9 (#3871)
This commit is contained in:
parent c3a8972550
commit d220d41182
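The diff below raises the supported Python floor to 3.9 and then applies the typing cleanup that baseline allows: typing.List/Dict/Tuple/Set/Type become the built-in generics from PEP 585, old "# type:" comments become inline annotations, and Awaitable/Generator/Iterator/Sequence move from typing to collections.abc. A standalone sketch of the pattern (the names below are illustrative, not taken from the repository):

from typing import Optional


# Before (3.8-era style):
# def split_ids(ids):
#     # type: (List[str]) -> Dict[str, Optional[int]]
#     ...

# After (3.9+ style, as applied throughout this commit):
def split_ids(ids: list[str]) -> dict[str, Optional[int]]:
    """Map each "name=number" entry to its number, or None when absent."""
    out: dict[str, Optional[int]] = {}
    for entry in ids:
        name, _, num = entry.partition("=")
        out[name] = int(num) if num else None
    return out

Note that Optional and Union still come from typing; the X | Y union syntax only arrives in Python 3.10, which is why the diff keeps those imports.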
@@ -30,4 +30,4 @@ repos:
 rev: v3.0.0
 hooks:
 - id: pyupgrade
-args: [--py38-plus]
+args: [--py39-plus]
@@ -1,6 +1,5 @@
 import logging
 from dataclasses import dataclass
-from typing import List

 from esphome.const import (
 CONF_ID,
@@ -200,7 +199,7 @@ async def esp8266_pin_to_code(config):
 @coroutine_with_priority(-999.0)
 async def add_pin_initial_states_array():
 # Add includes at the very end, so that they override everything
-initial_states: List[PinInitialState] = CORE.data[KEY_ESP8266][
+initial_states: list[PinInitialState] = CORE.data[KEY_ESP8266][
 KEY_PIN_INITIAL_STATES
 ]
 initial_modes_s = ", ".join(str(x.mode) for x in initial_states)
@@ -1,5 +1,5 @@
 from dataclasses import dataclass
-from typing import Any, List
+from typing import Any
 import esphome.codegen as cg
 import esphome.config_validation as cv
 from esphome.const import (
@@ -349,7 +349,7 @@ def _spi_extra_validate(config):
 class MethodDescriptor:
 method_schema: Any
 to_code: Any
-supported_chips: List[str]
+supported_chips: list[str]
 extra_validate: Any = None


@@ -1,4 +1,3 @@
-from typing import List
 import esphome.codegen as cg
 import esphome.config_validation as cv
 from esphome import automation
@@ -60,7 +59,7 @@ SELECT_SCHEMA = cv.ENTITY_BASE_SCHEMA.extend(cv.MQTT_COMMAND_COMPONENT_SCHEMA).e
 )


-async def setup_select_core_(var, config, *, options: List[str]):
+async def setup_select_core_(var, config, *, options: list[str]):
 await setup_entity(var, config)

 cg.add(var.traits.set_options(options))
@@ -76,14 +75,14 @@ async def setup_select_core_(var, config, *, options: List[str]):
 await mqtt.register_mqtt_component(mqtt_, config)


-async def register_select(var, config, *, options: List[str]):
+async def register_select(var, config, *, options: list[str]):
 if not CORE.has_id(config[CONF_ID]):
 var = cg.Pvariable(config[CONF_ID], var)
 cg.add(cg.App.register_select(var))
 await setup_select_core_(var, config, options=options)


-async def new_select(config, *, options: List[str]):
+async def new_select(config, *, options: list[str]):
 var = cg.new_Pvariable(config[CONF_ID])
 await register_select(var, config, options=options)
 return var
@@ -23,7 +23,7 @@ from esphome.core import CORE, EsphomeError
 from esphome.helpers import indent
 from esphome.util import safe_print, OrderedDict

-from typing import List, Optional, Tuple, Union
+from typing import Optional, Union
 from esphome.loader import get_component, get_platform, ComponentManifest
 from esphome.yaml_util import is_secret, ESPHomeDataBase, ESPForceValue
 from esphome.voluptuous_schema import ExtraKeysInvalid
@@ -50,10 +50,10 @@ def iter_components(config):
 yield p_name, platform, p_config


-ConfigPath = List[Union[str, int]]
+ConfigPath = list[Union[str, int]]


-def _path_begins_with(path, other): # type: (ConfigPath, ConfigPath) -> bool
+def _path_begins_with(path: ConfigPath, other: ConfigPath) -> bool:
 if len(path) < len(other):
 return False
 return path[: len(other)] == other
@@ -67,7 +67,7 @@ class _ValidationStepTask:
 self.step = step

 @property
-def _cmp_tuple(self) -> Tuple[float, int]:
+def _cmp_tuple(self) -> tuple[float, int]:
 return (-self.priority, self.id_number)

 def __eq__(self, other):
@@ -84,21 +84,20 @@ class Config(OrderedDict, fv.FinalValidateConfig):
 def __init__(self):
 super().__init__()
 # A list of voluptuous errors
-self.errors = [] # type: List[vol.Invalid]
+self.errors: list[vol.Invalid] = []
 # A list of paths that should be fully outputted
 # The values will be the paths to all "domain", for example (['logger'], 'logger')
 # or (['sensor', 'ultrasonic'], 'sensor.ultrasonic')
-self.output_paths = [] # type: List[Tuple[ConfigPath, str]]
+self.output_paths: list[tuple[ConfigPath, str]] = []
 # A list of components ids with the config path
-self.declare_ids = [] # type: List[Tuple[core.ID, ConfigPath]]
+self.declare_ids: list[tuple[core.ID, ConfigPath]] = []
 self._data = {}
 # Store pending validation tasks (in heap order)
-self._validation_tasks: List[_ValidationStepTask] = []
+self._validation_tasks: list[_ValidationStepTask] = []
 # ID to ensure stable order for keys with equal priority
 self._validation_tasks_id = 0

-def add_error(self, error):
-# type: (vol.Invalid) -> None
+def add_error(self, error: vol.Invalid) -> None:
 if isinstance(error, vol.MultipleInvalid):
 for err in error.errors:
 self.add_error(err)
@@ -132,20 +131,16 @@ class Config(OrderedDict, fv.FinalValidateConfig):
 e.prepend(path)
 self.add_error(e)

-def add_str_error(self, message, path):
-# type: (str, ConfigPath) -> None
+def add_str_error(self, message: str, path: ConfigPath) -> None:
 self.add_error(vol.Invalid(message, path))

-def add_output_path(self, path, domain):
-# type: (ConfigPath, str) -> None
+def add_output_path(self, path: ConfigPath, domain: str) -> None:
 self.output_paths.append((path, domain))

-def remove_output_path(self, path, domain):
-# type: (ConfigPath, str) -> None
+def remove_output_path(self, path: ConfigPath, domain: str) -> None:
 self.output_paths.remove((path, domain))

-def is_in_error_path(self, path):
-# type: (ConfigPath) -> bool
+def is_in_error_path(self, path: ConfigPath) -> bool:
 for err in self.errors:
 if _path_begins_with(err.path, path):
 return True
@@ -157,16 +152,16 @@ class Config(OrderedDict, fv.FinalValidateConfig):
 conf = conf[key]
 conf[path[-1]] = value

-def get_error_for_path(self, path):
-# type: (ConfigPath) -> Optional[vol.Invalid]
+def get_error_for_path(self, path: ConfigPath) -> Optional[vol.Invalid]:
 for err in self.errors:
 if self.get_deepest_path(err.path) == path:
 self.errors.remove(err)
 return err
 return None

-def get_deepest_document_range_for_path(self, path, get_key=False):
-# type: (ConfigPath, bool) -> Optional[ESPHomeDataBase]
+def get_deepest_document_range_for_path(
+self, path: ConfigPath, get_key: bool = False
+) -> Optional[ESPHomeDataBase]:
 data = self
 doc_range = None
 for index, path_item in enumerate(path):
@@ -207,8 +202,7 @@ class Config(OrderedDict, fv.FinalValidateConfig):
 return {}
 return data

-def get_deepest_path(self, path):
-# type: (ConfigPath) -> ConfigPath
+def get_deepest_path(self, path: ConfigPath) -> ConfigPath:
 """Return the path that is the deepest reachable by following path."""
 data = self
 part = []
@@ -532,7 +526,7 @@ class IDPassValidationStep(ConfigValidationStep):
 # because the component that did not validate doesn't have any IDs set
 return

-searching_ids = [] # type: List[Tuple[core.ID, ConfigPath]]
+searching_ids: list[tuple[core.ID, ConfigPath]] = []
 for id, path in iter_ids(result):
 if id.is_declaration:
 if id.id is not None:
@@ -780,8 +774,7 @@ def _get_parent_name(path, config):
 return path[-1]


-def _format_vol_invalid(ex, config):
-# type: (vol.Invalid, Config) -> str
+def _format_vol_invalid(ex: vol.Invalid, config: Config) -> str:
 message = ""

 paren = _get_parent_name(ex.path[:-1], config)
@@ -862,8 +855,9 @@ def _print_on_next_line(obj):
 return False


-def dump_dict(config, path, at_root=True):
-# type: (Config, ConfigPath, bool) -> Tuple[str, bool]
+def dump_dict(
+config: Config, path: ConfigPath, at_root: bool = True
+) -> tuple[str, bool]:
 conf = config.get_nested_item(path)
 ret = ""
 multiline = False
@@ -5,8 +5,7 @@ from esphome.core import CORE
 from esphome.helpers import read_file


-def read_config_file(path):
-# type: (str) -> str
+def read_config_file(path: str) -> str:
 if CORE.vscode and (
 not CORE.ace or os.path.abspath(path) == os.path.abspath(CORE.config_path)
 ):
@@ -2,7 +2,7 @@ import logging
 import math
 import os
 import re
-from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, Union
+from typing import TYPE_CHECKING, Optional, Union

 from esphome.const import (
 CONF_COMMENT,
@@ -469,19 +469,19 @@ class EsphomeCore:
 # Task counter for pending tasks
 self.task_counter = 0
 # The variable cache, for each ID this holds a MockObj of the variable obj
-self.variables: Dict[str, "MockObj"] = {}
+self.variables: dict[str, "MockObj"] = {}
 # A list of statements that go in the main setup() block
-self.main_statements: List["Statement"] = []
+self.main_statements: list["Statement"] = []
 # A list of statements to insert in the global block (includes and global variables)
-self.global_statements: List["Statement"] = []
+self.global_statements: list["Statement"] = []
 # A set of platformio libraries to add to the project
-self.libraries: List[Library] = []
+self.libraries: list[Library] = []
 # A set of build flags to set in the platformio project
-self.build_flags: Set[str] = set()
+self.build_flags: set[str] = set()
 # A set of defines to set for the compile process in esphome/core/defines.h
-self.defines: Set["Define"] = set()
+self.defines: set["Define"] = set()
 # A map of all platformio options to apply
-self.platformio_options: Dict[str, Union[str, List[str]]] = {}
+self.platformio_options: dict[str, Union[str, list[str]]] = {}
 # A set of strings of names of loaded integrations, used to find namespace ID conflicts
 self.loaded_integrations = set()
 # A set of component IDs to track what Component subclasses are declared
@@ -701,7 +701,7 @@ class EsphomeCore:
 _LOGGER.debug("Adding define: %s", define)
 return define

-def add_platformio_option(self, key: str, value: Union[str, List[str]]) -> None:
+def add_platformio_option(self, key: str, value: Union[str, list[str]]) -> None:
 new_val = value
 old_val = self.platformio_options.get(key)
 if isinstance(old_val, list):
@@ -734,7 +734,7 @@ class EsphomeCore:
 _LOGGER.debug("Waiting for variable %s", id)
 yield

-async def get_variable_with_full_id(self, id: ID) -> Tuple[ID, "MockObj"]:
+async def get_variable_with_full_id(self, id: ID) -> tuple[ID, "MockObj"]:
 if not isinstance(id, ID):
 raise ValueError(f"ID {id!r} must be of type ID!")
 return await _FakeAwaitable(self._get_variable_with_full_id_generator(id))
@@ -48,7 +48,8 @@ import heapq
 import inspect
 import logging
 import types
-from typing import Any, Awaitable, Callable, Generator, Iterator, List, Tuple
+from typing import Any, Callable
+from collections.abc import Awaitable, Generator, Iterator

 _LOGGER = logging.getLogger(__name__)

@@ -177,7 +178,7 @@ class _Task:
 return _Task(priority, self.id_number, self.iterator, self.original_function)

 @property
-def _cmp_tuple(self) -> Tuple[float, int]:
+def _cmp_tuple(self) -> tuple[float, int]:
 return (-self.priority, self.id_number)

 def __eq__(self, other):
@@ -194,7 +195,7 @@ class FakeEventLoop:
 """Emulate an asyncio EventLoop to run some registered coroutine jobs in sequence."""

 def __init__(self):
-self._pending_tasks: List[_Task] = []
+self._pending_tasks: list[_Task] = []
 self._task_counter = 0

 def add_job(self, func, *args, **kwargs):
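Several modules in this commit (the coroutine hunks above among them) also move Awaitable, Generator, Iterator and Sequence from typing to collections.abc; on Python 3.9 the collections.abc classes are subscriptable in annotations and the typing aliases are deprecated. A small standalone illustration, not taken from the repository:

from collections.abc import Generator


def countdown(n: int) -> Generator[int, None, None]:
    # collections.abc.Generator is subscriptable on Python 3.9+.
    while n > 0:
        yield n
        n -= 1


print(list(countdown(3)))  # [3, 2, 1]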
@@ -8,14 +8,10 @@ from esphome.yaml_util import ESPHomeDataBase
 from typing import (
 Any,
 Callable,
-Generator,
-List,
 Optional,
-Tuple,
-Type,
 Union,
-Sequence,
 )
+from collections.abc import Generator, Sequence

 from esphome.core import ( # noqa
 CORE,
@@ -54,9 +50,9 @@ SafeExpType = Union[
 int,
 float,
 TimePeriod,
-Type[bool],
-Type[int],
-Type[float],
+type[bool],
+type[int],
+type[float],
 Sequence[Any],
 ]

@@ -150,7 +146,7 @@ class CallExpression(Expression):
 class StructInitializer(Expression):
 __slots__ = ("base", "args")

-def __init__(self, base: Expression, *args: Tuple[str, Optional[SafeExpType]]):
+def __init__(self, base: Expression, *args: tuple[str, Optional[SafeExpType]]):
 self.base = base
 # TODO: args is always a Tuple, is this check required?
 if not isinstance(args, OrderedDict):
@@ -210,7 +206,7 @@ class ParameterListExpression(Expression):
 __slots__ = ("parameters",)

 def __init__(
-self, *parameters: Union[ParameterExpression, Tuple[SafeExpType, str]]
+self, *parameters: Union[ParameterExpression, tuple[SafeExpType, str]]
 ):
 self.parameters = []
 for parameter in parameters:
@@ -629,7 +625,7 @@ def add_define(name: str, value: SafeExpType = None):
 CORE.add_define(Define(name, safe_exp(value)))


-def add_platformio_option(key: str, value: Union[str, List[str]]):
+def add_platformio_option(key: str, value: Union[str, list[str]]):
 CORE.add_platformio_option(key, value)


@@ -646,7 +642,7 @@ async def get_variable(id_: ID) -> "MockObj":
 return await CORE.get_variable(id_)


-async def get_variable_with_full_id(id_: ID) -> Tuple[ID, "MockObj"]:
+async def get_variable_with_full_id(id_: ID) -> tuple[ID, "MockObj"]:
 """
 Wait for the given ID to be defined in the code generation and
 return it as a MockObj.
@@ -661,7 +657,7 @@ async def get_variable_with_full_id(id_: ID) -> Tuple[ID, "MockObj"]:

 async def process_lambda(
 value: Lambda,
-parameters: List[Tuple[SafeExpType, str]],
+parameters: list[tuple[SafeExpType, str]],
 capture: str = "=",
 return_type: SafeExpType = None,
 ) -> Generator[LambdaExpression, None, None]:
@@ -715,7 +711,7 @@ def is_template(value):

 async def templatable(
 value: Any,
-args: List[Tuple[SafeExpType, str]],
+args: list[tuple[SafeExpType, str]],
 output_type: Optional[SafeExpType],
 to_exp: Any = None,
 ):
@@ -763,7 +759,7 @@ class MockObj(Expression):
 attr = attr[1:]
 return MockObj(f"{self.base}{self.op}{attr}", next_op)

-def __call__(self, *args): # type: (SafeExpType) -> MockObj
+def __call__(self, *args: SafeExpType) -> "MockObj":
 call = CallExpression(self.base, *args)
 return MockObj(call, self.op)

@@ -107,8 +107,9 @@ async def setup_entity(var, config):
 add(var.set_entity_category(config[CONF_ENTITY_CATEGORY]))


-def extract_registry_entry_config(registry, full_config):
-# type: (Registry, ConfigType) -> RegistryEntry
+def extract_registry_entry_config(
+registry: Registry, full_config: ConfigType
+) -> RegistryEntry:
 key, config = next((k, v) for k, v in full_config.items() if k in registry)
 return registry[key], config

@@ -522,7 +522,7 @@ class DashboardEntry:
 return os.path.basename(self.path)

 @property
-def storage(self): # type: () -> Optional[StorageJSON]
+def storage(self) -> Optional[StorageJSON]:
 if not self._loaded_storage:
 self._storage = StorageJSON.load(
 ext_storage_path(settings.config_dir, self.filename)
@@ -817,7 +817,7 @@ class UndoDeleteRequestHandler(BaseHandler):
 shutil.move(os.path.join(trash_path, configuration), config_file)


-PING_RESULT = {} # type: dict
+PING_RESULT: dict = {}
 IMPORT_RESULT = {}
 STOP_EVENT = threading.Event()
 PING_REQUEST = threading.Event()
@@ -933,7 +933,7 @@ def get_static_path(*args):
 return os.path.join(get_base_frontend_path(), "static", *args)


-@functools.lru_cache(maxsize=None)
+@functools.cache
 def get_static_file_url(name):
 base = f"./static/{name}"

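The lru_cache change above is another 3.9-only cleanup: functools.cache was added in Python 3.9 as an unbounded cache equivalent to functools.lru_cache(maxsize=None). A minimal illustration, not taken from the ESPHome sources:

import functools


@functools.cache  # Python 3.9+; same behavior as @functools.lru_cache(maxsize=None)
def static_file_url(name: str) -> str:
    # Computed once per distinct name, then served from the cache.
    return f"./static/{name}"


assert static_file_url("app.js") is static_file_url("app.js")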
@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from typing import Dict, Any
+from typing import Any
 import contextvars

 from esphome.types import ConfigFragmentType, ID, ConfigPathType
@@ -9,7 +9,7 @@ import esphome.config_validation as cv
 class FinalValidateConfig(ABC):
 @property
 @abstractmethod
-def data(self) -> Dict[str, Any]:
+def data(self) -> dict[str, Any]:
 """A dictionary that can be used by post validation functions to store
 global data during the validation phase. Each component should store its
 data under a unique key
@@ -40,7 +40,7 @@ def indent(text, padding=" "):

 # From https://stackoverflow.com/a/14945195/8924614
 def cpp_string_escape(string, encoding="utf-8"):
-def _should_escape(byte): # type: (int) -> bool
+def _should_escape(byte: int) -> bool:
 if not 32 <= byte < 127:
 return True
 if byte in (ord("\\"), ord('"')):
@@ -1,5 +1,5 @@
 import logging
-from typing import Callable, List, Optional, Any, ContextManager
+from typing import Callable, Optional, Any, ContextManager
 from types import ModuleType
 import importlib
 import importlib.util
@@ -62,19 +62,19 @@ class ComponentManifest:
 return getattr(self.module, "to_code", None)

 @property
-def dependencies(self) -> List[str]:
+def dependencies(self) -> list[str]:
 return getattr(self.module, "DEPENDENCIES", [])

 @property
-def conflicts_with(self) -> List[str]:
+def conflicts_with(self) -> list[str]:
 return getattr(self.module, "CONFLICTS_WITH", [])

 @property
-def auto_load(self) -> List[str]:
+def auto_load(self) -> list[str]:
 return getattr(self.module, "AUTO_LOAD", [])

 @property
-def codeowners(self) -> List[str]:
+def codeowners(self) -> list[str]:
 return getattr(self.module, "CODEOWNERS", [])

 @property
@@ -87,7 +87,7 @@ class ComponentManifest:
 return getattr(self.module, "FINAL_VALIDATE_SCHEMA", None)

 @property
-def resources(self) -> List[FileResource]:
+def resources(self) -> list[FileResource]:
 """Return a list of all file resources defined in the package of this component.

 This will return all cpp source files that are located in the same folder as the
@@ -106,7 +106,7 @@ class ComponentManifest:

 class ComponentMetaFinder(importlib.abc.MetaPathFinder):
 def __init__(
-self, components_path: Path, allowed_components: Optional[List[str]] = None
+self, components_path: Path, allowed_components: Optional[list[str]] = None
 ) -> None:
 self._allowed_components = allowed_components
 self._finders = []
@@ -117,7 +117,7 @@ class ComponentMetaFinder(importlib.abc.MetaPathFinder):
 continue
 self._finders.append(finder)

-def find_spec(self, fullname: str, path: Optional[List[str]], target=None):
+def find_spec(self, fullname: str, path: Optional[list[str]], target=None):
 if not fullname.startswith("esphome.components."):
 return None
 parts = fullname.split(".")
@@ -144,7 +144,7 @@ def clear_component_meta_finders():


 def install_meta_finder(
-components_path: Path, allowed_components: Optional[List[str]] = None
+components_path: Path, allowed_components: Optional[list[str]] = None
 ):
 sys.meta_path.insert(0, ComponentMetaFinder(components_path, allowed_components))

@@ -1,6 +1,6 @@
 from dataclasses import dataclass
 import json
-from typing import List, Union
+from typing import Union
 from pathlib import Path

 import logging
@@ -310,7 +310,7 @@ class IDEData:
 return str(Path(self.firmware_elf_path).with_suffix(".bin"))

 @property
-def extra_flash_images(self) -> List[FlashImage]:
+def extra_flash_images(self) -> list[FlashImage]:
 return [
 FlashImage(path=entry["path"], offset=entry["offset"])
 for entry in self.raw["extra"]["flash_images"]
@@ -4,7 +4,7 @@ from datetime import datetime
 import json
 import logging
 import os
-from typing import Any, Optional, List
+from typing import Optional

 from esphome import const
 from esphome.core import CORE
@@ -15,19 +15,19 @@ from esphome.types import CoreType
 _LOGGER = logging.getLogger(__name__)


-def storage_path(): # type: () -> str
+def storage_path() -> str:
 return CORE.relative_internal_path(f"{CORE.config_filename}.json")


-def ext_storage_path(base_path, config_filename): # type: (str, str) -> str
+def ext_storage_path(base_path: str, config_filename: str) -> str:
 return os.path.join(base_path, ".esphome", f"{config_filename}.json")


-def esphome_storage_path(base_path): # type: (str) -> str
+def esphome_storage_path(base_path: str) -> str:
 return os.path.join(base_path, ".esphome", "esphome.json")


-def trash_storage_path(base_path): # type: (str) -> str
+def trash_storage_path(base_path: str) -> str:
 return os.path.join(base_path, ".esphome", "trash")


@@ -49,29 +49,29 @@ class StorageJSON:
 ):
 # Version of the storage JSON schema
 assert storage_version is None or isinstance(storage_version, int)
-self.storage_version = storage_version # type: int
+self.storage_version: int = storage_version
 # The name of the node
-self.name = name # type: str
+self.name: str = name
 # The comment of the node
-self.comment = comment # type: str
+self.comment: str = comment
 # The esphome version this was compiled with
-self.esphome_version = esphome_version # type: str
+self.esphome_version: str = esphome_version
 # The version of the file in src/main.cpp - Used to migrate the file
 assert src_version is None or isinstance(src_version, int)
-self.src_version = src_version # type: int
+self.src_version: int = src_version
 # Address of the ESP, for example livingroom.local or a static IP
-self.address = address # type: str
+self.address: str = address
 # Web server port of the ESP, for example 80
 assert web_port is None or isinstance(web_port, int)
-self.web_port = web_port # type: int
+self.web_port: int = web_port
 # The type of hardware in use, like "ESP32", "ESP32C3", "ESP8266", etc.
-self.target_platform = target_platform # type: str
+self.target_platform: str = target_platform
 # The absolute path to the platformio project
-self.build_path = build_path # type: str
+self.build_path: str = build_path
 # The absolute path to the firmware binary
-self.firmware_bin_path = firmware_bin_path # type: str
+self.firmware_bin_path: str = firmware_bin_path
 # A list of strings of names of loaded integrations
-self.loaded_integrations = loaded_integrations # type: List[str]
+self.loaded_integrations: list[str] = loaded_integrations
 self.loaded_integrations.sort()

 def as_dict(self):
@@ -97,8 +97,8 @@ class StorageJSON:

 @staticmethod
 def from_esphome_core(
-esph, old
-): # type: (CoreType, Optional[StorageJSON]) -> StorageJSON
+esph: CoreType, old: Optional["StorageJSON"]
+) -> "StorageJSON":
 hardware = esph.target_platform.upper()
 if esph.is_esp32:
 from esphome.components import esp32
@@ -135,7 +135,7 @@ class StorageJSON:
 )

 @staticmethod
-def _load_impl(path): # type: (str) -> Optional[StorageJSON]
+def _load_impl(path: str) -> Optional["StorageJSON"]:
 with codecs.open(path, "r", encoding="utf-8") as f_handle:
 storage = json.load(f_handle)
 storage_version = storage["storage_version"]
@@ -166,13 +166,13 @@ class StorageJSON:
 )

 @staticmethod
-def load(path): # type: (str) -> Optional[StorageJSON]
+def load(path: str) -> Optional["StorageJSON"]:
 try:
 return StorageJSON._load_impl(path)
 except Exception: # pylint: disable=broad-except
 return None

-def __eq__(self, o): # type: (Any) -> bool
+def __eq__(self, o) -> bool:
 return isinstance(o, StorageJSON) and self.as_dict() == o.as_dict()


@@ -182,15 +182,15 @@ class EsphomeStorageJSON:
 ):
 # Version of the storage JSON schema
 assert storage_version is None or isinstance(storage_version, int)
-self.storage_version = storage_version # type: int
+self.storage_version: int = storage_version
 # The cookie secret for the dashboard
-self.cookie_secret = cookie_secret # type: str
+self.cookie_secret: str = cookie_secret
 # The last time ESPHome checked for an update as an isoformat encoded str
-self.last_update_check_str = last_update_check # type: str
+self.last_update_check_str: str = last_update_check
 # Cache of the version gotten in the last version check
-self.remote_version = remote_version # type: Optional[str]
+self.remote_version: Optional[str] = remote_version

-def as_dict(self): # type: () -> dict
+def as_dict(self) -> dict:
 return {
 "storage_version": self.storage_version,
 "cookie_secret": self.cookie_secret,
@@ -199,24 +199,24 @@ class EsphomeStorageJSON:
 }

 @property
-def last_update_check(self): # type: () -> Optional[datetime]
+def last_update_check(self) -> Optional[datetime]:
 try:
 return datetime.strptime(self.last_update_check_str, "%Y-%m-%dT%H:%M:%S")
 except Exception: # pylint: disable=broad-except
 return None

 @last_update_check.setter
-def last_update_check(self, new): # type: (datetime) -> None
+def last_update_check(self, new: datetime) -> None:
 self.last_update_check_str = new.strftime("%Y-%m-%dT%H:%M:%S")

-def to_json(self): # type: () -> dict
+def to_json(self) -> dict:
 return f"{json.dumps(self.as_dict(), indent=2)}\n"

-def save(self, path): # type: (str) -> None
+def save(self, path: str) -> None:
 write_file_if_changed(path, self.to_json())

 @staticmethod
-def _load_impl(path): # type: (str) -> Optional[EsphomeStorageJSON]
+def _load_impl(path: str) -> Optional["EsphomeStorageJSON"]:
 with codecs.open(path, "r", encoding="utf-8") as f_handle:
 storage = json.load(f_handle)
 storage_version = storage["storage_version"]
@@ -228,14 +228,14 @@ class EsphomeStorageJSON:
 )

 @staticmethod
-def load(path): # type: (str) -> Optional[EsphomeStorageJSON]
+def load(path: str) -> Optional["EsphomeStorageJSON"]:
 try:
 return EsphomeStorageJSON._load_impl(path)
 except Exception: # pylint: disable=broad-except
 return None

 @staticmethod
-def get_default(): # type: () -> EsphomeStorageJSON
+def get_default() -> "EsphomeStorageJSON":
 return EsphomeStorageJSON(
 storage_version=1,
 cookie_secret=binascii.hexlify(os.urandom(64)).decode(),
@@ -243,5 +243,5 @@ class EsphomeStorageJSON:
 remote_version=None,
 )

-def __eq__(self, o): # type: (Any) -> bool
+def __eq__(self, o) -> bool:
 return isinstance(o, EsphomeStorageJSON) and self.as_dict() == o.as_dict()
@@ -1,5 +1,5 @@
 """This helper module tracks commonly used types in the esphome python codebase."""
-from typing import Dict, Union, List
+from typing import Union

 from esphome.core import ID, Lambda, EsphomeCore

@@ -8,11 +8,11 @@ ConfigFragmentType = Union[
 int,
 float,
 None,
-Dict[Union[str, int], "ConfigFragmentType"],
-List["ConfigFragmentType"],
+dict[Union[str, int], "ConfigFragmentType"],
+list["ConfigFragmentType"],
 ID,
 Lambda,
 ]
-ConfigType = Dict[str, ConfigFragmentType]
+ConfigType = dict[str, ConfigFragmentType]
 CoreType = EsphomeCore
 ConfigPathType = Union[str, int]
@@ -1,5 +1,4 @@
-import typing
-from typing import Union, List
+from typing import Union

 import collections
 import io
@@ -242,7 +241,7 @@ def is_dev_esphome_version():
 return "dev" in const.__version__


-def parse_esphome_version() -> typing.Tuple[int, int, int]:
+def parse_esphome_version() -> tuple[int, int, int]:
 match = re.match(r"^(\d+).(\d+).(\d+)(-dev\d*|b\d*)?$", const.__version__)
 if match is None:
 raise ValueError(f"Failed to parse ESPHome version '{const.__version__}'")
@@ -282,7 +281,7 @@ class SerialPort:


 # from https://github.com/pyserial/pyserial/blob/master/serial/tools/list_ports.py
-def get_serial_ports() -> List[SerialPort]:
+def get_serial_ports() -> list[SerialPort]:
 from serial.tools.list_ports import comports

 result = []
@@ -10,15 +10,13 @@ import esphome.config_validation as cv
 from typing import Optional


-def _get_invalid_range(res, invalid):
-# type: (Config, cv.Invalid) -> Optional[DocumentRange]
+def _get_invalid_range(res: Config, invalid: cv.Invalid) -> Optional[DocumentRange]:
 return res.get_deepest_document_range_for_path(
 invalid.path, invalid.error_message == "extra keys not allowed"
 )


-def _dump_range(range):
-# type: (Optional[DocumentRange]) -> Optional[dict]
+def _dump_range(range: Optional[DocumentRange]) -> Optional[dict]:
 if range is None:
 return None
 return {
@@ -2,7 +2,7 @@ import logging
 import os
 import re
 from pathlib import Path
-from typing import Dict, List, Union
+from typing import Union

 from esphome.config import iter_components
 from esphome.const import (
@@ -98,7 +98,7 @@ def replace_file_content(text, pattern, repl):
 return content_new, count


-def storage_should_clean(old, new): # type: (StorageJSON, StorageJSON) -> bool
+def storage_should_clean(old: StorageJSON, new: StorageJSON) -> bool:
 if old is None:
 return True

@@ -123,7 +123,7 @@ def update_storage_json():
 new.save(path)


-def format_ini(data: Dict[str, Union[str, List[str]]]) -> str:
+def format_ini(data: dict[str, Union[str, list[str]]]) -> str:
 content = ""
 for key, value in sorted(data.items()):
 if isinstance(value, list):
@@ -226,7 +226,7 @@ the custom_components folder or the external_components feature.


 def copy_src_tree():
-source_files: List[loader.FileResource] = []
+source_files: list[loader.FileResource] = []
 for _, component, _ in iter_components(CORE.config):
 source_files += component.resources
 source_files_map = {
@@ -1,7 +1,7 @@
 import socket
 import threading
 import time
-from typing import Dict, Optional
+from typing import Optional
 import logging
 from dataclasses import dataclass

@@ -71,12 +71,12 @@ class DashboardStatus(threading.Thread):
 threading.Thread.__init__(self)
 self.zc = zc
 self.query_hosts: set[str] = set()
-self.key_to_host: Dict[str, str] = {}
+self.key_to_host: dict[str, str] = {}
 self.stop_event = threading.Event()
 self.query_event = threading.Event()
 self.on_update = on_update

-def request_query(self, hosts: Dict[str, str]) -> None:
+def request_query(self, hosts: dict[str, str]) -> None:
 self.query_hosts = set(hosts.values())
 self.key_to_host = hosts
 self.query_event.set()
@@ -1,3 +1,3 @@
 [tool.black]
-target-version = ["py36", "py37", "py38"]
+target-version = ["py39", "py310"]
 exclude = 'generated'
@@ -109,7 +109,7 @@ def main():
 print_error(file_, linno, msg)
 errors += 1

-PYUPGRADE_TARGET = "--py38-plus"
+PYUPGRADE_TARGET = "--py39-plus"
 cmd = ["pyupgrade", PYUPGRADE_TARGET] + files
 print()
 print("Running pyupgrade...")

setup.py
@@ -74,7 +74,7 @@ setup(
 zip_safe=False,
 platforms="any",
 test_suite="tests",
-python_requires=">=3.8,<4.0",
+python_requires=">=3.9.0",
 install_requires=REQUIRES,
 keywords=["home", "automation"],
 entry_points={"console_scripts": ["esphome = esphome.__main__:main"]},
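With python_requires=">=3.9.0" in setup.py, pip refuses to install the package on Python 3.8 or older. A project can additionally fail early at runtime with a clearer message; the guard below is illustrative only and not part of this diff:

import sys

# Illustrative guard, not taken from the ESPHome sources.
if sys.version_info < (3, 9):
    raise RuntimeError("ESPHome requires Python 3.9 or newer")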
@@ -1,12 +1,9 @@
-from typing import Text
-
 import hypothesis.strategies._internal.core as st
 from hypothesis.strategies._internal.strategies import SearchStrategy


 @st.defines_strategy(force_reusable_values=True)
-def mac_addr_strings():
-# type: () -> SearchStrategy[Text]
+def mac_addr_strings() -> SearchStrategy[str]:
 """A strategy for MAC address strings.

 This consists of six strings representing integers [0..255],