Mirror of https://github.com/esphome/esphome.git (synced 2024-11-21 11:37:27 +01:00)
Drop Python 2 Support (#793)
* Remove Python 2 support
* Remove u-strings
* Remove docker symlinks
* Remove from travis
* Update requirements
* Upgrade flake8/pylint
* Fixes
* Manual
* Run pyupgrade
* Lint
* Remove base_int
* Fix
* Update platformio_api.py
* Update component.cpp
parent b5714cd70f
commit 056c72d50d
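The diff below applies the same Python-3-only modernization pattern across the codebase: the esphome.py_compat shims (IS_PY2, IS_PY3, string_types, text_type, byte_to_bytes, char_to_byte, safe_input) are dropped in favour of the built-ins, and u"..." / str.format() strings become f-strings (largely via pyupgrade). A minimal before/after sketch of that pattern follows; the describe/encode_byte names are illustrative only and do not appear in the ESPHome sources.

# Hypothetical example (not from the ESPHome sources) showing the kind of
# rewrite this commit applies everywhere: py_compat shims out, f-strings in.

# Before (Python 2/3 compatible):
#   from esphome.py_compat import string_types, text_type, byte_to_bytes
#   def describe(name, value):
#       if not isinstance(name, string_types):
#           raise ValueError(u"name must be a string, got {}".format(name))
#       return u"{}={}".format(name, text_type(value))

# After (Python 3 only):
def describe(name, value):
    """Return a 'name=value' string, accepting only str names."""
    if not isinstance(name, str):          # string_types -> str
        raise ValueError(f"name must be a string, got {name}")
    return f"{name}={value}"               # u"..." + .format() -> f-string


def encode_byte(value):
    """byte_to_bytes(x) from py_compat becomes the bytes([x]) built-in."""
    return bytes([value & 0xFF])


if __name__ == "__main__":
    print(describe("baud_rate", 115200))   # baud_rate=115200
    print(encode_byte(0x7F))               # b'\x7f'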
@@ -21,12 +21,6 @@ matrix:
- esphome tests/test1.yaml compile
- esphome tests/test2.yaml compile
- esphome tests/test3.yaml compile
- python: "2.7"
env: TARGET=Test2.7
script:
- esphome tests/test1.yaml compile
- esphome tests/test2.yaml compile
- esphome tests/test3.yaml compile
- env: TARGET=Cpp-Lint
dist: trusty
sudo: required
@@ -14,7 +14,5 @@ RUN \
COPY requirements_test.txt /requirements_test.txt
RUN pip3 install --no-cache-dir wheel && pip3 install --no-cache-dir -r /requirements_test.txt
RUN ln -s /usr/bin/pip3 /usr/bin/pip && ln -f -s /usr/bin/python3 /usr/bin/python
VOLUME ["/esphome"]
WORKDIR /esphome
@@ -1,5 +1,3 @@
from __future__ import print_function
import argparse
import functools
import logging
@@ -14,7 +12,6 @@ from esphome.const import CONF_BAUD_RATE, CONF_BROKER, CONF_LOGGER, CONF_OTA, \
CONF_PASSWORD, CONF_PORT, CONF_ESPHOME, CONF_PLATFORMIO_OPTIONS
from esphome.core import CORE, EsphomeError, coroutine, coroutine_with_priority
from esphome.helpers import color, indent
from esphome.py_compat import IS_PY2, safe_input, IS_PY3
from esphome.util import run_external_command, run_external_process, safe_print, list_yaml_files
_LOGGER = logging.getLogger(__name__)
@@ -42,12 +39,12 @@ def choose_prompt(options):
if len(options) == 1:
return options[0][1]
safe_print(u"Found multiple options, please choose one:")
safe_print("Found multiple options, please choose one:")
for i, (desc, _) in enumerate(options):
safe_print(u" [{}] {}".format(i + 1, desc))
safe_print(f" [{i+1}] {desc}")
while True:
opt = safe_input('(number): ')
opt = input('(number): ')
if opt in options:
opt = options.index(opt)
break
@@ -57,20 +54,20 @@ def choose_prompt(options):
raise ValueError
break
except ValueError:
safe_print(color('red', u"Invalid option: '{}'".format(opt)))
safe_print(color('red', f"Invalid option: '{opt}'"))
return options[opt - 1][1]
def choose_upload_log_host(default, check_default, show_ota, show_mqtt, show_api):
options = []
for res, desc in get_serial_ports():
options.append((u"{} ({})".format(res, desc), res))
options.append((f"{res} ({desc})", res))
if (show_ota and 'ota' in CORE.config) or (show_api and 'api' in CORE.config):
options.append((u"Over The Air ({})".format(CORE.address), CORE.address))
options.append((f"Over The Air ({CORE.address})", CORE.address))
if default == 'OTA':
return CORE.address
if show_mqtt and 'mqtt' in CORE.config:
options.append((u"MQTT ({})".format(CORE.config['mqtt'][CONF_BROKER]), 'MQTT'))
options.append(("MQTT ({})".format(CORE.config['mqtt'][CONF_BROKER]), 'MQTT'))
if default == 'OTA':
return 'MQTT'
if default is not None:
@@ -108,11 +105,7 @@ def run_miniterm(config, port):
except serial.SerialException:
_LOGGER.error("Serial port closed!")
return
if IS_PY2:
line = raw.replace('\r', '').replace('\n', '')
else:
line = raw.replace(b'\r', b'').replace(b'\n', b'').decode('utf8',
'backslashreplace')
line = raw.replace(b'\r', b'').replace(b'\n', b'').decode('utf8', 'backslashreplace')
time = datetime.now().time().strftime('[%H:%M:%S]')
message = time + line
safe_print(message)
@@ -127,11 +120,9 @@ def wrap_to_code(name, comp):
@functools.wraps(comp.to_code)
@coroutine_with_priority(coro.priority)
def wrapped(conf):
cg.add(cg.LineComment(u"{}:".format(name)))
cg.add(cg.LineComment(f"{name}:"))
if comp.config_schema is not None:
conf_str = yaml_util.dump(conf)
if IS_PY2:
conf_str = conf_str.decode('utf-8')
conf_str = conf_str.replace('//', '')
cg.add(cg.LineComment(indent(conf_str)))
yield coro(conf)
@@ -243,7 +234,7 @@ def setup_log(debug=False, quiet=False):
log_level = logging.INFO
logging.basicConfig(level=log_level)
fmt = "%(levelname)s %(message)s"
colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
colorfmt = f"%(log_color)s{fmt}%(reset)s"
datefmt = '%H:%M:%S'
logging.getLogger('urllib3').setLevel(logging.WARNING)
@@ -292,12 +283,12 @@ def command_compile(args, config):
if exit_code != 0:
return exit_code
if args.only_generate:
_LOGGER.info(u"Successfully generated source code.")
_LOGGER.info("Successfully generated source code.")
return 0
exit_code = compile_program(args, config)
if exit_code != 0:
return exit_code
_LOGGER.info(u"Successfully compiled program.")
_LOGGER.info("Successfully compiled program.")
return 0
@@ -307,7 +298,7 @@ def command_upload(args, config):
exit_code = upload_program(config, args, port)
if exit_code != 0:
return exit_code
_LOGGER.info(u"Successfully uploaded program.")
_LOGGER.info("Successfully uploaded program.")
return 0
@@ -324,13 +315,13 @@ def command_run(args, config):
exit_code = compile_program(args, config)
if exit_code != 0:
return exit_code
_LOGGER.info(u"Successfully compiled program.")
_LOGGER.info("Successfully compiled program.")
port = choose_upload_log_host(default=args.upload_port, check_default=None,
show_ota=True, show_mqtt=False, show_api=True)
exit_code = upload_program(config, args, port)
if exit_code != 0:
return exit_code
_LOGGER.info(u"Successfully uploaded program.")
_LOGGER.info("Successfully uploaded program.")
if args.no_logs:
return 0
port = choose_upload_log_host(default=args.upload_port, check_default=port,
@@ -349,7 +340,7 @@ def command_mqtt_fingerprint(args, config):
def command_version(args):
safe_print(u"Version: {}".format(const.__version__))
safe_print(f"Version: {const.__version__}")
return 0
@@ -377,10 +368,10 @@ def command_update_all(args):
twidth = 60
def print_bar(middle_text):
middle_text = " {} ".format(middle_text)
middle_text = f" {middle_text} "
width = len(click.unstyle(middle_text))
half_line = "=" * ((twidth - width) // 2)
click.echo("%s%s%s" % (half_line, middle_text, half_line))
click.echo(f"{half_line}{middle_text}{half_line}")
for f in files:
print("Updating {}".format(color('cyan', f)))
@@ -431,7 +422,7 @@ POST_CONFIG_ACTIONS = {
def parse_args(argv):
parser = argparse.ArgumentParser(description='ESPHome v{}'.format(const.__version__))
parser = argparse.ArgumentParser(description=f'ESPHome v{const.__version__}')
parser.add_argument('-v', '--verbose', help="Enable verbose esphome logs.",
action='store_true')
parser.add_argument('-q', '--quiet', help="Disable all esphome logs.",
@@ -525,14 +516,10 @@ def run_esphome(argv):
_LOGGER.error("Missing configuration parameter, see esphome --help.")
return 1
if IS_PY2:
_LOGGER.warning("You're using ESPHome with python 2. Support for python 2 is deprecated "
"and will be removed in 1.15.0. Please reinstall ESPHome with python 3.6 "
"or higher.")
elif IS_PY3 and sys.version_info < (3, 6, 0):
_LOGGER.warning("You're using ESPHome with python 3.5. Support for python 3.5 is "
"deprecated and will be removed in 1.15.0. Please reinstall ESPHome with "
"python 3.6 or higher.")
if sys.version_info < (3, 6, 0):
_LOGGER.error("You're running ESPHome with Python <3.6. ESPHome is no longer compatible "
"with this Python version. Please reinstall ESPHome with Python 3.6+")
return 1
if args.command in PRE_CONFIG_ACTIONS:
try:
@@ -551,7 +538,7 @@ def run_esphome(argv):
CORE.config = config
if args.command not in POST_CONFIG_ACTIONS:
safe_print(u"Unknown command {}".format(args.command))
safe_print(f"Unknown command {args.command}")
try:
rc = POST_CONFIG_ACTIONS[args.command](args, config)
@@ -14,7 +14,6 @@ import esphome.api.api_pb2 as pb
from esphome.const import CONF_PASSWORD, CONF_PORT
from esphome.core import EsphomeError
from esphome.helpers import resolve_ip_address, indent, color
from esphome.py_compat import text_type, IS_PY2, byte_to_bytes, char_to_byte
from esphome.util import safe_print
_LOGGER = logging.getLogger(__name__)
@@ -67,16 +66,16 @@ MESSAGE_TYPE_TO_PROTO = {
def _varuint_to_bytes(value):
if value <= 0x7F:
return byte_to_bytes(value)
return bytes([value])
ret = bytes()
while value:
temp = value & 0x7F
value >>= 7
if value:
ret += byte_to_bytes(temp | 0x80)
ret += bytes([temp | 0x80])
else:
ret += byte_to_bytes(temp)
ret += bytes([temp])
return ret
@@ -84,8 +83,7 @@ def _varuint_to_bytes(value):
def _bytes_to_varuint(value):
result = 0
bitpos = 0
for c in value:
val = char_to_byte(c)
for val in value:
result |= (val & 0x7F) << bitpos
bitpos += 7
if (val & 0x80) == 0:
@@ -191,8 +189,8 @@ class APIClient(threading.Thread):
self._socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
try:
self._socket.connect((ip, self._port))
except socket.error as err:
err = APIConnectionError("Error connecting to {}: {}".format(ip, err))
except OSError as err:
err = APIConnectionError(f"Error connecting to {ip}: {err}")
self._fatal_error(err)
raise err
self._socket.settimeout(0.1)
@@ -200,7 +198,7 @@ class APIClient(threading.Thread):
self._socket_open_event.set()
hello = pb.HelloRequest()
hello.client_info = 'ESPHome v{}'.format(const.__version__)
hello.client_info = f'ESPHome v{const.__version__}'
try:
resp = self._send_message_await_response(hello, pb.HelloResponse)
except APIConnectionError as err:
@@ -251,8 +249,8 @@ class APIClient(threading.Thread):
with self._socket_write_lock:
try:
self._socket.sendall(data)
except socket.error as err:
err = APIConnectionError("Error while writing data: {}".format(err))
except OSError as err:
err = APIConnectionError(f"Error while writing data: {err}")
self._fatal_error(err)
raise err
@@ -265,10 +263,7 @@ class APIClient(threading.Thread):
raise ValueError
encoded = msg.SerializeToString()
_LOGGER.debug("Sending %s:\n%s", type(msg), indent(text_type(msg)))
if IS_PY2:
req = chr(0x00)
else:
_LOGGER.debug("Sending %s:\n%s", type(msg), indent(str(msg)))
req = bytes([0])
req += _varuint_to_bytes(len(encoded))
req += _varuint_to_bytes(message_type)
@@ -355,14 +350,14 @@ class APIClient(threading.Thread):
raise APIConnectionError("Socket was closed")
except socket.timeout:
continue
except socket.error as err:
raise APIConnectionError("Error while receiving data: {}".format(err))
except OSError as err:
raise APIConnectionError(f"Error while receiving data: {err}")
ret += val
return ret
def _recv_varint(self):
raw = bytes()
while not raw or char_to_byte(raw[-1]) & 0x80:
while not raw or raw[-1] & 0x80:
raw += self._recv(1)
return _bytes_to_varuint(raw)
@@ -371,7 +366,7 @@ class APIClient(threading.Thread):
return
# Preamble
if char_to_byte(self._recv(1)[0]) != 0x00:
if self._recv(1)[0] != 0x00:
raise APIConnectionError("Invalid preamble")
length = self._recv_varint()
@@ -436,7 +431,7 @@ def run_logs(config, address):
return
if err:
_LOGGER.warning(u"Disconnected from API: %s", err)
_LOGGER.warning("Disconnected from API: %s", err)
while retry_timer:
retry_timer.pop(0).cancel()
@@ -454,18 +449,18 @@ def run_logs(config, address):
wait_time = int(min(1.5**min(tries, 100), 30))
if not has_connects:
_LOGGER.warning(u"Initial connection failed. The ESP might not be connected "
u"to WiFi yet (%s). Re-Trying in %s seconds",
_LOGGER.warning("Initial connection failed. The ESP might not be connected "
"to WiFi yet (%s). Re-Trying in %s seconds",
error, wait_time)
else:
_LOGGER.warning(u"Couldn't connect to API (%s). Trying to reconnect in %s seconds",
_LOGGER.warning("Couldn't connect to API (%s). Trying to reconnect in %s seconds",
error, wait_time)
timer = threading.Timer(wait_time, functools.partial(try_connect, None, tries + 1))
timer.start()
retry_timer.append(timer)
def on_log(msg):
time_ = datetime.now().time().strftime(u'[%H:%M:%S]')
time_ = datetime.now().time().strftime('[%H:%M:%S]')
text = msg.message
if msg.send_failed:
text = color('white', '(Message skipped because it was too big to fit in '
@@ -83,9 +83,9 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
try:
return cv.Schema([schema])(value)
except cv.Invalid as err2:
if u'extra keys not allowed' in str(err2) and len(err2.path) == 2:
if 'extra keys not allowed' in str(err2) and len(err2.path) == 2:
raise err
if u'Unable to find action' in str(err):
if 'Unable to find action' in str(err):
raise err2
raise cv.MultipleInvalid([err, err2])
elif isinstance(value, dict):
@@ -36,4 +36,4 @@ def to_code(config):
continue
conf = config[key]
sens = yield sensor.new_sensor(conf)
cg.add(getattr(var, 'set_{}_sensor'.format(key))(sens))
cg.add(getattr(var, f'set_{key}_sensor')(sens))
@@ -2,7 +2,6 @@ import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import sensor, voltage_sampler
from esphome.const import CONF_GAIN, CONF_MULTIPLEXER, ICON_FLASH, UNIT_VOLT, CONF_ID
from esphome.py_compat import string_types
from . import ads1115_ns, ADS1115Component
DEPENDENCIES = ['ads1115']
@@ -32,9 +31,9 @@ GAIN = {
def validate_gain(value):
if isinstance(value, float):
value = u'{:0.03f}'.format(value)
elif not isinstance(value, string_types):
raise cv.Invalid('invalid gain "{}"'.format(value))
value = f'{value:0.03f}'
elif not isinstance(value, str):
raise cv.Invalid(f'invalid gain "{value}"')
return cv.enum(GAIN)(value)
@@ -102,7 +102,7 @@ def homeassistant_service_to_code(config, action_id, template_arg, args):
def validate_homeassistant_event(value):
value = cv.string(value)
if not value.startswith(u'esphome.'):
if not value.startswith('esphome.'):
raise cv.Invalid("ESPHome can only generate Home Assistant events that begin with "
"esphome. For example 'esphome.xyz'")
return value
@@ -9,7 +9,6 @@ from esphome.const import CONF_DEVICE_CLASS, CONF_FILTERS, \
CONF_ON_DOUBLE_CLICK, CONF_ON_MULTI_CLICK, CONF_ON_PRESS, CONF_ON_RELEASE, CONF_ON_STATE, \
CONF_STATE, CONF_TIMING, CONF_TRIGGER_ID, CONF_FOR, CONF_NAME, CONF_MQTT_ID
from esphome.core import CORE, coroutine, coroutine_with_priority
from esphome.py_compat import string_types
from esphome.util import Registry
DEVICE_CLASSES = [
@@ -94,7 +93,7 @@ MULTI_CLICK_TIMING_SCHEMA = cv.Schema({
def parse_multi_click_timing_str(value):
if not isinstance(value, string_types):
if not isinstance(value, str):
return value
parts = value.lower().split(' ')
@@ -104,10 +103,10 @@ def parse_multi_click_timing_str(value):
try:
state = cv.boolean(parts[0])
except cv.Invalid:
raise cv.Invalid(u"First word must either be ON or OFF, not {}".format(parts[0]))
raise cv.Invalid("First word must either be ON or OFF, not {}".format(parts[0]))
if parts[1] != 'for':
raise cv.Invalid(u"Second word must be 'for', got {}".format(parts[1]))
raise cv.Invalid("Second word must be 'for', got {}".format(parts[1]))
if parts[2] == 'at':
if parts[3] == 'least':
@@ -115,12 +114,12 @@ def parse_multi_click_timing_str(value):
elif parts[3] == 'most':
key = CONF_MAX_LENGTH
else:
raise cv.Invalid(u"Third word after at must either be 'least' or 'most', got {}"
u"".format(parts[3]))
raise cv.Invalid("Third word after at must either be 'least' or 'most', got {}"
"".format(parts[3]))
try:
length = cv.positive_time_period_milliseconds(parts[4])
except cv.Invalid as err:
raise cv.Invalid(u"Multi Click Grammar Parsing length failed: {}".format(err))
raise cv.Invalid(f"Multi Click Grammar Parsing length failed: {err}")
return {
CONF_STATE: state,
key: str(length)
@@ -132,12 +131,12 @@ def parse_multi_click_timing_str(value):
try:
min_length = cv.positive_time_period_milliseconds(parts[2])
except cv.Invalid as err:
raise cv.Invalid(u"Multi Click Grammar Parsing minimum length failed: {}".format(err))
raise cv.Invalid(f"Multi Click Grammar Parsing minimum length failed: {err}")
try:
max_length = cv.positive_time_period_milliseconds(parts[4])
except cv.Invalid as err:
raise cv.Invalid(u"Multi Click Grammar Parsing minimum length failed: {}".format(err))
raise cv.Invalid(f"Multi Click Grammar Parsing minimum length failed: {err}")
return {
CONF_STATE: state,
@@ -8,8 +8,8 @@ from esphome.const import CONF_ID, CONF_MODE, CONF_NUMBER, CONF_PINS, CONF_RUN_C
def validate_pin_number(value):
valid_pins = [0, 2, 4, 12, 13, 14, 15, 25, 26, 27, 32, 33, 34, 35, 36, 37, 38, 39]
if value[CONF_NUMBER] not in valid_pins:
raise cv.Invalid(u"Only pins {} support wakeup"
u"".format(', '.join(str(x) for x in valid_pins)))
raise cv.Invalid("Only pins {} support wakeup"
"".format(', '.join(str(x) for x in valid_pins)))
return value
@@ -1,4 +1,3 @@
# coding=utf-8
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome import core, automation
@@ -27,7 +26,7 @@ DISPLAY_ROTATIONS = {
def validate_rotation(value):
value = cv.string(value)
if value.endswith(u"°"):
if value.endswith("°"):
value = value[:-1]
return cv.enum(DISPLAY_ROTATIONS, int=True)(value)
@@ -46,33 +46,33 @@ def bt_uuid(value):
pattern = re.compile("^[A-F|0-9]{4,}$")
if not pattern.match(value):
raise cv.Invalid(
u"Invalid hexadecimal value for 16 bit UUID format: '{}'".format(in_value))
f"Invalid hexadecimal value for 16 bit UUID format: '{in_value}'")
return value
if len(value) == len(bt_uuid32_format):
pattern = re.compile("^[A-F|0-9]{8,}$")
if not pattern.match(value):
raise cv.Invalid(
u"Invalid hexadecimal value for 32 bit UUID format: '{}'".format(in_value))
f"Invalid hexadecimal value for 32 bit UUID format: '{in_value}'")
return value
if len(value) == len(bt_uuid128_format):
pattern = re.compile(
"^[A-F|0-9]{8,}-[A-F|0-9]{4,}-[A-F|0-9]{4,}-[A-F|0-9]{4,}-[A-F|0-9]{12,}$")
if not pattern.match(value):
raise cv.Invalid(
u"Invalid hexadecimal value for 128 UUID format: '{}'".format(in_value))
f"Invalid hexadecimal value for 128 UUID format: '{in_value}'")
return value
raise cv.Invalid(
u"Service UUID must be in 16 bit '{}', 32 bit '{}', or 128 bit '{}' format".format(
"Service UUID must be in 16 bit '{}', 32 bit '{}', or 128 bit '{}' format".format(
bt_uuid16_format, bt_uuid32_format, bt_uuid128_format))
def as_hex(value):
return cg.RawExpression('0x{}ULL'.format(value))
return cg.RawExpression(f'0x{value}ULL')
def as_hex_array(value):
value = value.replace("-", "")
cpp_array = ['0x{}'.format(part) for part in [value[i:i+2] for i in range(0, len(value), 2)]]
cpp_array = [f'0x{part}' for part in [value[i:i+2] for i in range(0, len(value), 2)]]
return cg.RawExpression(
'(uint8_t*)(const uint8_t[16]){{{}}}'.format(','.join(reversed(cpp_array))))
@@ -27,7 +27,7 @@ TOUCH_PADS = {
def validate_touch_pad(value):
value = validate_gpio_pin(value)
if value not in TOUCH_PADS:
raise cv.Invalid("Pin {} does not support touch pads.".format(value))
raise cv.Invalid(f"Pin {value} does not support touch pads.")
return value
@@ -1,11 +1,11 @@
# coding=utf-8
import functools
from esphome import core
from esphome.components import display
import esphome.config_validation as cv
import esphome.codegen as cg
from esphome.const import CONF_FILE, CONF_GLYPHS, CONF_ID, CONF_SIZE
from esphome.core import CORE, HexInt
from esphome.py_compat import sort_by_cmp
DEPENDENCIES = ['display']
MULTI_CONF = True
@@ -33,9 +33,9 @@ def validate_glyphs(value):
return -1
if len(x_) > len(y_):
return 1
raise cv.Invalid(u"Found duplicate glyph {}".format(x))
raise cv.Invalid(f"Found duplicate glyph {x}")
sort_by_cmp(value, comparator)
value.sort(key=functools.cmp_to_key(comparator))
return value
@@ -55,15 +55,15 @@ def validate_pillow_installed(value):
def validate_truetype_file(value):
if value.endswith('.zip'): # for Google Fonts downloads
raise cv.Invalid(u"Please unzip the font archive '{}' first and then use the .ttf files "
u"inside.".format(value))
raise cv.Invalid("Please unzip the font archive '{}' first and then use the .ttf files "
"inside.".format(value))
if not value.endswith('.ttf'):
raise cv.Invalid(u"Only truetype (.ttf) files are supported. Please make sure you're "
u"using the correct format or rename the extension to .ttf")
raise cv.Invalid("Only truetype (.ttf) files are supported. Please make sure you're "
"using the correct format or rename the extension to .ttf")
return cv.file_(value)
DEFAULT_GLYPHS = u' !"%()+,-.:0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz°'
DEFAULT_GLYPHS = ' !"%()+,-.:0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz°'
CONF_RAW_DATA_ID = 'raw_data_id'
FONT_SCHEMA = cv.Schema({
@@ -84,7 +84,7 @@ def to_code(config):
try:
font = ImageFont.truetype(path, config[CONF_SIZE])
except Exception as e:
raise core.EsphomeError(u"Could not load truetype file {}: {}".format(path, e))
raise core.EsphomeError(f"Could not load truetype file {path}: {e}")
ascent, descent = font.getmetrics()
@@ -4,7 +4,6 @@ from esphome import config_validation as cv, automation
from esphome import codegen as cg
from esphome.const import CONF_ID, CONF_INITIAL_VALUE, CONF_RESTORE_VALUE, CONF_TYPE, CONF_VALUE
from esphome.core import coroutine_with_priority
from esphome.py_compat import IS_PY3
globals_ns = cg.esphome_ns.namespace('globals')
GlobalsComponent = globals_ns.class_('GlobalsComponent', cg.Component)
@@ -36,7 +35,7 @@ def to_code(config):
if config[CONF_RESTORE_VALUE]:
value = config[CONF_ID].id
if IS_PY3 and isinstance(value, str):
if isinstance(value, str):
value = value.encode()
hash_ = int(hashlib.md5(value).hexdigest()[:8], 16)
cg.add(glob.set_restore_value(hash_))
@@ -1,11 +1,9 @@
# coding=utf-8
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import i2c, sensor
from esphome.const import (CONF_ADDRESS, CONF_ID, CONF_OVERSAMPLING, CONF_RANGE, ICON_MAGNET,
UNIT_MICROTESLA, UNIT_DEGREES, ICON_SCREEN_ROTATION,
CONF_UPDATE_INTERVAL)
from esphome.py_compat import text_type
DEPENDENCIES = ['i2c']
@@ -54,7 +52,7 @@ def validate_enum(enum_values, units=None, int=True):
_units = []
if units is not None:
_units = units if isinstance(units, list) else [units]
_units = [text_type(x) for x in _units]
_units = [str(x) for x in _units]
enum_bound = cv.enum(enum_values, int=int)
def validate_enum_bound(value):
@@ -74,7 +72,7 @@ CONFIG_SCHEMA = cv.Schema({
cv.GenerateID(): cv.declare_id(HMC5883LComponent),
cv.Optional(CONF_ADDRESS): cv.i2c_address,
cv.Optional(CONF_OVERSAMPLING, default='1x'): validate_enum(HMC5883LOversamplings, units="x"),
cv.Optional(CONF_RANGE, default=u'130µT'): validate_enum(HMC5883L_RANGES, units=["uT", u"µT"]),
cv.Optional(CONF_RANGE, default='130µT'): validate_enum(HMC5883L_RANGES, units=["uT", "µT"]),
cv.Optional(CONF_FIELD_STRENGTH_X): field_strength_schema,
cv.Optional(CONF_FIELD_STRENGTH_Y): field_strength_schema,
cv.Optional(CONF_FIELD_STRENGTH_Z): field_strength_schema,
@@ -1,3 +1,5 @@
import urllib.parse as urlparse
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome import automation
@@ -5,12 +7,6 @@ from esphome.const import CONF_ID, CONF_TIMEOUT, CONF_ESPHOME, CONF_METHOD, \
CONF_ARDUINO_VERSION, ARDUINO_VERSION_ESP8266_2_5_0
from esphome.core import CORE, Lambda
from esphome.core_config import PLATFORMIO_ESP8266_LUT
from esphome.py_compat import IS_PY3
if IS_PY3:
import urllib.parse as urlparse # pylint: disable=no-name-in-module,import-error
else:
import urlparse # pylint: disable=import-error
DEPENDENCIES = ['network']
AUTO_LOAD = ['json']
@@ -1,4 +1,3 @@
# coding=utf-8
import logging
from esphome import core
@@ -33,7 +32,7 @@ def to_code(config):
try:
image = Image.open(path)
except Exception as e:
raise core.EsphomeError(u"Could not load image file {}: {}".format(path, e))
raise core.EsphomeError(f"Could not load image file {path}: {e}")
if CONF_RESIZE in config:
image.thumbnail(config[CONF_RESIZE])
@@ -1,4 +1,3 @@
# coding=utf-8
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import i2c, sensor
@@ -1,4 +1,3 @@
# coding=utf-8
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import i2c, sensor
@@ -1,4 +1,3 @@
# coding=utf-8
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import i2c, sensor
@@ -253,8 +253,8 @@ def validate_effects(allowed_effects):
name = x[key][CONF_NAME]
if name in names:
errors.append(
cv.Invalid(u"Found the effect name '{}' twice. All effects must have "
u"unique names".format(name), [i])
cv.Invalid("Found the effect name '{}' twice. All effects must have "
"unique names".format(name), [i])
)
continue
names.add(name)
@@ -7,7 +7,6 @@ from esphome.automation import LambdaAction
from esphome.const import CONF_ARGS, CONF_BAUD_RATE, CONF_FORMAT, CONF_HARDWARE_UART, CONF_ID, \
CONF_LEVEL, CONF_LOGS, CONF_ON_MESSAGE, CONF_TAG, CONF_TRIGGER_ID, CONF_TX_BUFFER_SIZE
from esphome.core import CORE, EsphomeError, Lambda, coroutine_with_priority
from esphome.py_compat import text_type
logger_ns = cg.esphome_ns.namespace('logger')
LOG_LEVELS = {
@@ -64,8 +63,8 @@ def validate_local_no_higher_than_global(value):
global_level = value.get(CONF_LEVEL, 'DEBUG')
for tag, level in value.get(CONF_LOGS, {}).items():
if LOG_LEVEL_SEVERITY.index(level) > LOG_LEVEL_SEVERITY.index(global_level):
raise EsphomeError(u"The local log level {} for {} must be less severe than the "
u"global log level {}.".format(level, tag, global_level))
raise EsphomeError("The local log level {} for {} must be less severe than the "
"global log level {}.".format(level, tag, global_level))
return value
@@ -119,7 +118,7 @@ def to_code(config):
if CORE.is_esp8266 and has_serial_logging and is_at_least_verbose:
debug_serial_port = HARDWARE_UART_TO_SERIAL[config.get(CONF_HARDWARE_UART)]
cg.add_build_flag("-DDEBUG_ESP_PORT={}".format(debug_serial_port))
cg.add_build_flag(f"-DDEBUG_ESP_PORT={debug_serial_port}")
cg.add_build_flag("-DLWIP_DEBUG")
DEBUG_COMPONENTS = {
'HTTP_CLIENT',
@@ -134,7 +133,7 @@ def to_code(config):
# 'MDNS_RESPONDER',
}
for comp in DEBUG_COMPONENTS:
cg.add_build_flag("-DDEBUG_ESP_{}".format(comp))
cg.add_build_flag(f"-DDEBUG_ESP_{comp}")
if CORE.is_esp32 and is_at_least_verbose:
cg.add_build_flag('-DCORE_DEBUG_LEVEL=5')
if CORE.is_esp32 and is_at_least_very_verbose:
@@ -165,7 +164,7 @@ def maybe_simple_message(schema):
def validate_printf(value):
# https://stackoverflow.com/questions/30011379/how-can-i-parse-a-c-format-string-in-python
# pylint: disable=anomalous-backslash-in-string
cfmt = u"""\
cfmt = """\
( # start of capture group 1
% # literal "%"
(?: # first option
@@ -179,8 +178,8 @@ def validate_printf(value):
""" # noqa
matches = re.findall(cfmt, value[CONF_FORMAT], flags=re.X)
if len(matches) != len(value[CONF_ARGS]):
raise cv.Invalid(u"Found {} printf-patterns ({}), but {} args were given!"
u"".format(len(matches), u', '.join(matches), len(value[CONF_ARGS])))
raise cv.Invalid("Found {} printf-patterns ({}), but {} args were given!"
"".format(len(matches), ', '.join(matches), len(value[CONF_ARGS])))
return value
@@ -196,9 +195,9 @@ LOGGER_LOG_ACTION_SCHEMA = cv.All(maybe_simple_message({
@automation.register_action(CONF_LOGGER_LOG, LambdaAction, LOGGER_LOG_ACTION_SCHEMA)
def logger_log_action_to_code(config, action_id, template_arg, args):
esp_log = LOG_LEVEL_TO_ESP_LOG[config[CONF_LEVEL]]
args_ = [cg.RawExpression(text_type(x)) for x in config[CONF_ARGS]]
args_ = [cg.RawExpression(str(x)) for x in config[CONF_ARGS]]
text = text_type(cg.statement(esp_log(config[CONF_TAG], config[CONF_FORMAT], *args_)))
text = str(cg.statement(esp_log(config[CONF_TAG], config[CONF_FORMAT], *args_)))
lambda_ = yield cg.process_lambda(Lambda(text), args, return_type=cg.void)
yield cg.new_Pvariable(action_id, template_arg, lambda_)
@@ -39,14 +39,14 @@ def to_code(config):
yield i2c.register_i2c_device(var, config)
for d in ['x', 'y', 'z']:
accel_key = 'accel_{}'.format(d)
accel_key = f'accel_{d}'
if accel_key in config:
sens = yield sensor.new_sensor(config[accel_key])
cg.add(getattr(var, 'set_accel_{}_sensor'.format(d))(sens))
accel_key = 'gyro_{}'.format(d)
cg.add(getattr(var, f'set_accel_{d}_sensor')(sens))
accel_key = f'gyro_{d}'
if accel_key in config:
sens = yield sensor.new_sensor(config[accel_key])
cg.add(getattr(var, 'set_gyro_{}_sensor'.format(d))(sens))
cg.add(getattr(var, f'set_gyro_{d}_sensor')(sens))
if CONF_TEMPERATURE in config:
sens = yield sensor.new_sensor(config[CONF_TEMPERATURE])
@@ -64,28 +64,28 @@ def validate_config(value):
topic_prefix = value[CONF_TOPIC_PREFIX]
if CONF_BIRTH_MESSAGE not in value:
out[CONF_BIRTH_MESSAGE] = {
CONF_TOPIC: '{}/status'.format(topic_prefix),
CONF_TOPIC: f'{topic_prefix}/status',
CONF_PAYLOAD: 'online',
CONF_QOS: 0,
CONF_RETAIN: True,
}
if CONF_WILL_MESSAGE not in value:
out[CONF_WILL_MESSAGE] = {
CONF_TOPIC: '{}/status'.format(topic_prefix),
CONF_TOPIC: f'{topic_prefix}/status',
CONF_PAYLOAD: 'offline',
CONF_QOS: 0,
CONF_RETAIN: True,
}
if CONF_SHUTDOWN_MESSAGE not in value:
out[CONF_SHUTDOWN_MESSAGE] = {
CONF_TOPIC: '{}/status'.format(topic_prefix),
CONF_TOPIC: f'{topic_prefix}/status',
CONF_PAYLOAD: 'offline',
CONF_QOS: 0,
CONF_RETAIN: True,
}
if CONF_LOG_TOPIC not in value:
out[CONF_LOG_TOPIC] = {
CONF_TOPIC: '{}/debug'.format(topic_prefix),
CONF_TOPIC: f'{topic_prefix}/debug',
CONF_QOS: 0,
CONF_RETAIN: True,
}
@@ -95,7 +95,7 @@ def validate_config(value):
def validate_fingerprint(value):
value = cv.string(value)
if re.match(r'^[0-9a-f]{40}$', value) is None:
raise cv.Invalid(u"fingerprint must be valid SHA1 hash")
raise cv.Invalid("fingerprint must be valid SHA1 hash")
return value
@@ -82,7 +82,7 @@ def validate_method_pin(value):
for opt in (CONF_PIN, CONF_CLOCK_PIN, CONF_DATA_PIN):
if opt in value and value[opt] not in pins_:
raise cv.Invalid("Method {} only supports pin(s) {}".format(
method, ', '.join('GPIO{}'.format(x) for x in pins_)
method, ', '.join(f'GPIO{x}' for x in pins_)
), path=[CONF_METHOD])
return value
@@ -1,4 +1,3 @@
# coding=utf-8
from math import log
import esphome.config_validation as cv
@@ -28,7 +27,7 @@ def validate_calibration_parameter(value):
value = cv.string(value)
parts = value.split('->')
if len(parts) != 2:
raise cv.Invalid(u"Calibration parameter must be of form 3000 -> 23°C")
raise cv.Invalid("Calibration parameter must be of form 3000 -> 23°C")
voltage = cv.resistance(parts[0].strip())
temperature = cv.temperature(parts[1].strip())
return validate_calibration_parameter({
@@ -10,8 +10,8 @@ PartitionLightOutput = partitions_ns.class_('PartitionLightOutput', light.Addres
def validate_from_to(value):
if value[CONF_FROM] > value[CONF_TO]:
raise cv.Invalid(u"From ({}) must not be larger than to ({})"
u"".format(value[CONF_FROM], value[CONF_TO]))
raise cv.Invalid("From ({}) must not be larger than to ({})"
"".format(value[CONF_FROM], value[CONF_TO]))
return value
@@ -36,7 +36,7 @@ SENSORS_TO_TYPE = {
def validate_pmsx003_sensors(value):
for key, types in SENSORS_TO_TYPE.items():
if key in value and value[CONF_TYPE] not in types:
raise cv.Invalid(u"{} does not have {} sensor!".format(value[CONF_TYPE], key))
raise cv.Invalid("{} does not have {} sensor!".format(value[CONF_TYPE], key))
return value
@@ -1,11 +1,9 @@
# coding=utf-8
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import i2c, sensor
from esphome.const import (CONF_ADDRESS, CONF_ID, CONF_OVERSAMPLING, CONF_RANGE, ICON_MAGNET,
UNIT_MICROTESLA, UNIT_DEGREES, ICON_SCREEN_ROTATION,
CONF_UPDATE_INTERVAL)
from esphome.py_compat import text_type
DEPENDENCIES = ['i2c']
@@ -46,7 +44,7 @@ def validate_enum(enum_values, units=None, int=True):
_units = []
if units is not None:
_units = units if isinstance(units, list) else [units]
_units = [text_type(x) for x in _units]
_units = [str(x) for x in _units]
enum_bound = cv.enum(enum_values, int=int)
def validate_enum_bound(value):
@@ -65,7 +63,7 @@ heading_schema = sensor.sensor_schema(UNIT_DEGREES, ICON_SCREEN_ROTATION, 1)
CONFIG_SCHEMA = cv.Schema({
cv.GenerateID(): cv.declare_id(QMC5883LComponent),
cv.Optional(CONF_ADDRESS): cv.i2c_address,
cv.Optional(CONF_RANGE, default=u'200µT'): validate_enum(QMC5883L_RANGES, units=["uT", u"µT"]),
cv.Optional(CONF_RANGE, default='200µT'): validate_enum(QMC5883L_RANGES, units=["uT", "µT"]),
cv.Optional(CONF_OVERSAMPLING, default="512x"): validate_enum(QMC5883LOversamplings, units="x"),
cv.Optional(CONF_FIELD_STRENGTH_X): field_strength_schema,
cv.Optional(CONF_FIELD_STRENGTH_Y): field_strength_schema,
@@ -7,7 +7,6 @@ from esphome.const import CONF_DATA, CONF_TRIGGER_ID, CONF_NBITS, CONF_ADDRESS, \
CONF_PROTOCOL, CONF_GROUP, CONF_DEVICE, CONF_STATE, CONF_CHANNEL, CONF_FAMILY, CONF_REPEAT, \
CONF_WAIT_TIME, CONF_TIMES, CONF_TYPE_ID, CONF_CARRIER_FREQUENCY
from esphome.core import coroutine
from esphome.py_compat import string_types, text_type
from esphome.util import Registry, SimpleRegistry
AUTO_LOAD = ['binary_sensor']
@@ -52,7 +51,7 @@ def register_trigger(name, type, data_type):
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(type),
cv.GenerateID(CONF_RECEIVER_ID): cv.use_id(RemoteReceiverBase),
})
registerer = TRIGGER_REGISTRY.register('on_{}'.format(name), validator)
registerer = TRIGGER_REGISTRY.register(f'on_{name}', validator)
def decorator(func):
@coroutine
@@ -98,7 +97,7 @@ def register_action(name, type_, schema):
cv.GenerateID(CONF_TRANSMITTER_ID): cv.use_id(RemoteTransmitterBase),
cv.Optional(CONF_REPEAT): validate_repeat,
})
registerer = automation.register_action('remote_transmitter.transmit_{}'.format(name),
registerer = automation.register_action(f'remote_transmitter.transmit_{name}',
type_, validator)
def decorator(func):
@@ -122,11 +121,11 @@ def register_action(name, type_, schema):
def declare_protocol(name):
data = ns.struct('{}Data'.format(name))
binary_sensor_ = ns.class_('{}BinarySensor'.format(name), RemoteReceiverBinarySensorBase)
trigger = ns.class_('{}Trigger'.format(name), RemoteReceiverTrigger)
action = ns.class_('{}Action'.format(name), RemoteTransmitterActionBase)
dumper = ns.class_('{}Dumper'.format(name), RemoteTransmitterDumper)
data = ns.struct(f'{name}Data')
binary_sensor_ = ns.class_(f'{name}BinarySensor', RemoteReceiverBinarySensorBase)
trigger = ns.class_(f'{name}Trigger', RemoteReceiverTrigger)
action = ns.class_(f'{name}Action', RemoteTransmitterActionBase)
dumper = ns.class_(f'{name}Dumper', RemoteTransmitterDumper)
return data, binary_sensor_, trigger, action, dumper
@@ -141,7 +140,7 @@ DUMPER_REGISTRY = Registry({
def validate_dumpers(value):
if isinstance(value, string_types) and value.lower() == 'all':
if isinstance(value, str) and value.lower() == 'all':
return validate_dumpers(list(DUMPER_REGISTRY.keys()))
return cv.validate_registry('dumper', DUMPER_REGISTRY)(value)
@@ -432,12 +431,12 @@ RC_SWITCH_PROTOCOL_SCHEMA = cv.Any(
def validate_rc_switch_code(value):
if not isinstance(value, (str, text_type)):
if not isinstance(value, (str, str)):
raise cv.Invalid("All RCSwitch codes must be in quotes ('')")
for c in value:
if c not in ('0', '1'):
raise cv.Invalid(u"Invalid RCSwitch code character '{}'. Only '0' and '1' are allowed"
u"".format(c))
raise cv.Invalid("Invalid RCSwitch code character '{}'. Only '0' and '1' are allowed"
"".format(c))
if len(value) > 64:
raise cv.Invalid("Maximum length for RCSwitch codes is 64, code '{}' has length {}"
"".format(value, len(value)))
@@ -447,7 +446,7 @@ def validate_rc_switch_code(value):
def validate_rc_switch_raw_code(value):
if not isinstance(value, (str, text_type)):
if not isinstance(value, (str, str)):
raise cv.Invalid("All RCSwitch raw codes must be in quotes ('')")
for c in value:
if c not in ('0', '1', 'x'):
@@ -19,12 +19,12 @@ def show_new(value):
if 'name' in value:
args.append(('name', value['name']))
args.append(('turn_on_action', {
'remote_transmitter.transmit_{}'.format(key): val
f'remote_transmitter.transmit_{key}': val
}))
text = yaml_util.dump([OrderedDict(args)])
raise cv.Invalid(u"This platform has been removed in 1.13, please change to:\n\n{}\n\n."
u"".format(text))
raise cv.Invalid("This platform has been removed in 1.13, please change to:\n\n{}\n\n."
"".format(text))
CONFIG_SCHEMA = show_new
@@ -28,7 +28,7 @@ def validate_acceleration(value):
try:
value = float(value)
except ValueError:
raise cv.Invalid("Expected acceleration as floating point number, got {}".format(value))
raise cv.Invalid(f"Expected acceleration as floating point number, got {value}")
if value <= 0:
raise cv.Invalid("Acceleration must be larger than 0 steps/s^2!")
@@ -48,7 +48,7 @@ def validate_speed(value):
try:
value = float(value)
except ValueError:
raise cv.Invalid("Expected speed as floating point number, got {}".format(value))
raise cv.Invalid(f"Expected speed as floating point number, got {value}")
if value <= 0:
raise cv.Invalid("Speed must be larger than 0 steps/s!")
@@ -3,7 +3,6 @@ import re
import esphome.config_validation as cv
from esphome import core
from esphome.py_compat import string_types
_LOGGER = logging.getLogger(__name__)
@@ -24,8 +23,8 @@ def validate_substitution_key(value):
for char in value:
if char not in VALID_SUBSTITUTIONS_CHARACTERS:
raise cv.Invalid(
u"Substitution must only consist of upper/lowercase characters, the underscore "
u"and numbers. The character '{}' cannot be used".format(char))
"Substitution must only consist of upper/lowercase characters, the underscore "
"and numbers. The character '{}' cannot be used".format(char))
return value
@@ -42,7 +41,7 @@ VARIABLE_PROG = re.compile('\\$([{0}]+|\\{{[{0}]*\\}})'.format(VALID_SUBSTITUTIO
def _expand_substitutions(substitutions, value, path):
if u'$' not in value:
if '$' not in value:
return value
orig_value = value
@@ -56,11 +55,11 @@ def _expand_substitutions(substitutions, value, path):
i, j = m.span(0)
name = m.group(1)
if name.startswith(u'{') and name.endswith(u'}'):
if name.startswith('{') and name.endswith('}'):
name = name[1:-1]
if name not in substitutions:
_LOGGER.warning(u"Found '%s' (see %s) which looks like a substitution, but '%s' was "
u"not declared", orig_value, u'->'.join(str(x) for x in path), name)
_LOGGER.warning("Found '%s' (see %s) which looks like a substitution, but '%s' was "
"not declared", orig_value, '->'.join(str(x) for x in path), name)
i = j
continue
@@ -91,7 +90,7 @@ def _substitute_item(substitutions, item, path):
for old, new in replace_keys:
item[new] = item[old]
del item[old]
elif isinstance(item, string_types):
elif isinstance(item, str):
sub = _expand_substitutions(substitutions, item, path)
if sub != item:
return sub
@@ -109,8 +108,8 @@ def do_substitution_pass(config):
substitutions = config[CONF_SUBSTITUTIONS]
with cv.prepend_path('substitutions'):
if not isinstance(substitutions, dict):
raise cv.Invalid(u"Substitutions must be a key to value mapping, got {}"
u"".format(type(substitutions)))
raise cv.Invalid("Substitutions must be a key to value mapping, got {}"
"".format(type(substitutions)))
replace_keys = []
for key, value in substitutions.items():
@@ -3,7 +3,6 @@ import esphome.config_validation as cv
from esphome import automation
from esphome.components import time
from esphome.const import CONF_TIME_ID, CONF_ID, CONF_TRIGGER_ID
from esphome.py_compat import string_types
sun_ns = cg.esphome_ns.namespace('sun')
@@ -32,7 +31,7 @@ ELEVATION_MAP = {
def elevation(value):
if isinstance(value, string_types):
if isinstance(value, str):
try:
value = ELEVATION_MAP[cv.one_of(*ELEVATION_MAP, lower=True, space='_')(value)]
except cv.Invalid:
@@ -1,4 +1,3 @@
# coding=utf-8
import esphome.codegen as cg
import esphome.config_validation as cv
from esphome.components import i2c, sensor
@@ -14,7 +14,6 @@ from esphome.const import CONF_CRON, CONF_DAYS_OF_MONTH, CONF_DAYS_OF_WEEK, CONF
CONF_MINUTES, CONF_MONTHS, CONF_ON_TIME, CONF_SECONDS, CONF_TIMEZONE, CONF_TRIGGER_ID, \
CONF_AT, CONF_SECOND, CONF_HOUR, CONF_MINUTE
from esphome.core import coroutine, coroutine_with_priority
from esphome.py_compat import string_types
_LOGGER = logging.getLogger(__name__)
@@ -33,10 +32,10 @@ def _tz_timedelta(td):
if offset_hour == 0 and offset_minute == 0 and offset_second == 0:
return '0'
if offset_minute == 0 and offset_second == 0:
return '{}'.format(offset_hour)
return f'{offset_hour}'
if offset_second == 0:
return '{}:{}'.format(offset_hour, offset_minute)
return '{}:{}:{}'.format(offset_hour, offset_minute, offset_second)
return f'{offset_hour}:{offset_minute}'
return f'{offset_hour}:{offset_minute}:{offset_second}'
# https://stackoverflow.com/a/16804556/8924614
@@ -133,7 +132,7 @@ def detect_tz():
def _parse_cron_int(value, special_mapping, message):
special_mapping = special_mapping or {}
if isinstance(value, string_types) and value in special_mapping:
if isinstance(value, str) and value in special_mapping:
return special_mapping[value]
try:
return int(value)
@@ -143,41 +142,40 @@ def _parse_cron_int(value, special_mapping, message):
def _parse_cron_part(part, min_value, max_value, special_mapping):
if part in ('*', '?'):
return set(x for x in range(min_value, max_value + 1))
return set(range(min_value, max_value + 1))
if '/' in part:
data = part.split('/')
if len(data) > 2:
raise cv.Invalid(u"Can't have more than two '/' in one time expression, got {}"
raise cv.Invalid("Can't have more than two '/' in one time expression, got {}"
.format(part))
offset, repeat = data
offset_n = 0
if offset:
offset_n = _parse_cron_int(offset, special_mapping,
u"Offset for '/' time expression must be an integer, got {}")
"Offset for '/' time expression must be an integer, got {}")
try:
repeat_n = int(repeat)
except ValueError:
raise cv.Invalid(u"Repeat for '/' time expression must be an integer, got {}"
raise cv.Invalid("Repeat for '/' time expression must be an integer, got {}"
.format(repeat))
return set(x for x in range(offset_n, max_value + 1, repeat_n))
return set(range(offset_n, max_value + 1, repeat_n))
if '-' in part:
data = part.split('-')
if len(data) > 2:
raise cv.Invalid(u"Can't have more than two '-' in range time expression '{}'"
raise cv.Invalid("Can't have more than two '-' in range time expression '{}'"
.format(part))
begin, end = data
begin_n = _parse_cron_int(begin, special_mapping, u"Number for time range must be integer, "
u"got {}")
end_n = _parse_cron_int(end, special_mapping, u"Number for time range must be integer, "
u"got {}")
begin_n = _parse_cron_int(begin, special_mapping, "Number for time range must be integer, "
"got {}")
end_n = _parse_cron_int(end, special_mapping, "Number for time range must be integer, "
"got {}")
if end_n < begin_n:
return set(x for x in range(end_n, max_value + 1)) | \
set(x for x in range(min_value, begin_n + 1))
return set(x for x in range(begin_n, end_n + 1))
return set(range(end_n, max_value + 1)) | set(range(min_value, begin_n + 1))
return set(range(begin_n, end_n + 1))
return {_parse_cron_int(part, special_mapping, u"Number for time expression must be an "
u"integer, got {}")}
return {_parse_cron_int(part, special_mapping, "Number for time expression must be an "
"integer, got {}")}
def cron_expression_validator(name, min_value, max_value, special_mapping=None):
@@ -249,7 +247,7 @@ def validate_cron_keys(value):
if CONF_CRON in value:
for key in value.keys():
if key in CRON_KEYS:
raise cv.Invalid("Cannot use option {} when cron: is specified.".format(key))
raise cv.Invalid(f"Cannot use option {key} when cron: is specified.")
if CONF_AT in value:
raise cv.Invalid("Cannot use option at with cron!")
cron_ = value[CONF_CRON]
@@ -259,7 +257,7 @@ def validate_cron_keys(value):
if CONF_AT in value:
for key in value.keys():
if key in CRON_KEYS:
raise cv.Invalid("Cannot use option {} when at: is specified.".format(key))
raise cv.Invalid(f"Cannot use option {key} when at: is specified.")
at_ = value[CONF_AT]
value = {x: value[x] for x in value if x != CONF_AT}
value.update(at_)
@@ -3,7 +3,6 @@ import esphome.config_validation as cv
from esphome import pins, automation
from esphome.const import CONF_BAUD_RATE, CONF_ID, CONF_RX_PIN, CONF_TX_PIN, CONF_UART_ID, CONF_DATA
from esphome.core import CORE, coroutine
from esphome.py_compat import text_type, binary_type, char_to_byte
uart_ns = cg.esphome_ns.namespace('uart')
UARTComponent = uart_ns.class_('UARTComponent', cg.Component)
@@ -13,7 +12,7 @@ MULTI_CONF = True
def validate_raw_data(value):
if isinstance(value, text_type):
if isinstance(value, str):
return value.encode('utf-8')
if isinstance(value, str):
return value
@@ -77,8 +76,8 @@ def uart_write_to_code(config, action_id, template_arg, args):
var = cg.new_Pvariable(action_id, template_arg)
yield cg.register_parented(var, config[CONF_ID])
data = config[CONF_DATA]
if isinstance(data, binary_type):
data = [char_to_byte(x) for x in data]
if isinstance(data, bytes):
data = list(data)
if cg.is_template(data):
templ = yield cg.templatable(data, args, cg.std_vector.template(cg.uint8))
@@ -3,7 +3,6 @@ import esphome.config_validation as cv
from esphome.components import switch, uart
from esphome.const import CONF_DATA, CONF_ID, CONF_INVERTED
from esphome.core import HexInt
from esphome.py_compat import binary_type, char_to_byte
from .. import uart_ns, validate_raw_data
DEPENDENCIES = ['uart']
@@ -25,6 +24,6 @@ def to_code(config):
yield uart.register_uart_device(var, config)
data = config[CONF_DATA]
if isinstance(data, binary_type):
data = [HexInt(char_to_byte(x)) for x in data]
if isinstance(data, bytes):
data = [HexInt(x) for x in data]
cg.add(var.set_data(data))
@@ -30,9 +30,9 @@ def validate_password(value):
if not value:
return value
if len(value) < 8:
raise cv.Invalid(u"WPA password must be at least 8 characters long")
raise cv.Invalid("WPA password must be at least 8 characters long")
if len(value) > 64:
raise cv.Invalid(u"WPA password must be at most 64 characters long")
raise cv.Invalid("WPA password must be at most 64 characters long")
return value
@@ -1,5 +1,3 @@
from __future__ import print_function
import collections
import importlib
import logging
@@ -18,7 +16,6 @@ from esphome.components.substitutions import CONF_SUBSTITUTIONS
from esphome.const import CONF_ESPHOME, CONF_PLATFORM, ESP_PLATFORMS
from esphome.core import CORE, EsphomeError # noqa
from esphome.helpers import color, indent
from esphome.py_compat import text_type, IS_PY2, decode_text, string_types
from esphome.util import safe_print, OrderedDict
from typing import List, Optional, Tuple, Union # noqa
@@ -31,7 +28,7 @@ _LOGGER = logging.getLogger(__name__)
_COMPONENT_CACHE = {}
class ComponentManifest(object):
class ComponentManifest:
def __init__(self, module, base_components_path, is_core=False, is_platform=False):
self.module = module
self._is_core = is_core
@@ -89,7 +86,7 @@ class ComponentManifest(object):
source_files = core.find_source_files(os.path.join(core_p, 'dummy'))
ret = {}
for f in source_files:
ret['esphome/core/{}'.format(f)] = os.path.join(core_p, f)
ret[f'esphome/core/{f}'] = os.path.join(core_p, f)
return ret
source_files = core.find_source_files(self.module.__file__)
@@ -101,7 +98,7 @@ class ComponentManifest(object):
rel = os.path.relpath(full_file, self.base_components_path)
# Always use / for C++ include names
rel = rel.replace(os.sep, '/')
target_file = 'esphome/components/{}'.format(rel)
target_file = f'esphome/components/{rel}'
ret[target_file] = full_file
return ret
@@ -119,12 +116,6 @@ def _mount_config_dir():
if not os.path.isdir(custom_path):
CUSTOM_COMPONENTS_PATH = None
return
init_path = os.path.join(custom_path, '__init__.py')
if IS_PY2 and not os.path.isfile(init_path):
_LOGGER.warning("Found 'custom_components' folder, but file __init__.py was not found. "
"ESPHome will automatically create it now....")
with open(init_path, 'w') as f:
f.write('\n')
if CORE.config_dir not in sys.path:
sys.path.insert(0, CORE.config_dir)
CUSTOM_COMPONENTS_PATH = custom_path
@ -137,7 +128,7 @@ def _lookup_module(domain, is_platform):
    _mount_config_dir()
    # First look for custom_components
    try:
        module = importlib.import_module('custom_components.{}'.format(domain))
        module = importlib.import_module(f'custom_components.{domain}')
    except ImportError as e:
        # ImportError when no such module
        if 'No module named' not in str(e):
@ -153,7 +144,7 @@ def _lookup_module(domain, is_platform):
        return manif

    try:
        module = importlib.import_module('esphome.components.{}'.format(domain))
        module = importlib.import_module(f'esphome.components.{domain}')
    except ImportError as e:
        if 'No module named' not in str(e):
            _LOGGER.error("Unable to import component %s:", domain, exc_info=True)
@ -173,7 +164,7 @@ def get_component(domain):


def get_platform(domain, platform):
    full = '{}.{}'.format(platform, domain)
    full = f'{platform}.{domain}'
    return _lookup_module(full, True)

@ -192,7 +183,7 @@ def iter_components(config):
        yield domain, component, conf
        if component.is_platform_component:
            for p_config in conf:
                p_name = u"{}.{}".format(domain, p_config[CONF_PLATFORM])
                p_name = "{}.{}".format(domain, p_config[CONF_PLATFORM])
                platform = get_platform(domain, p_config[CONF_PLATFORM])
                yield p_name, platform, p_config

@ -208,13 +199,13 @@ def _path_begins_with(path, other):  # type: (ConfigPath, ConfigPath) -> bool

class Config(OrderedDict):
    def __init__(self):
        super(Config, self).__init__()
        super().__init__()
        # A list of voluptuous errors
        self.errors = []  # type: List[vol.Invalid]
        # A list of paths that should be fully outputted
        # The values will be the paths to all "domain", for example (['logger'], 'logger')
        # or (['sensor', 'ultrasonic'], 'sensor.ultrasonic')
        self.output_paths = []  # type: List[Tuple[ConfigPath, unicode]]
        self.output_paths = []  # type: List[Tuple[ConfigPath, str]]

    def add_error(self, error):
        # type: (vol.Invalid) -> None
@ -234,15 +225,15 @@ class Config(OrderedDict):
            self.add_error(e)

    def add_str_error(self, message, path):
        # type: (basestring, ConfigPath) -> None
        # type: (str, ConfigPath) -> None
        self.add_error(vol.Invalid(message, path))

    def add_output_path(self, path, domain):
        # type: (ConfigPath, unicode) -> None
        # type: (ConfigPath, str) -> None
        self.output_paths.append((path, domain))

    def remove_output_path(self, path, domain):
        # type: (ConfigPath, unicode) -> None
        # type: (ConfigPath, str) -> None
        self.output_paths.remove((path, domain))

    def is_in_error_path(self, path):
@ -312,12 +303,10 @@ def iter_ids(config, path=None):
        yield id, path
    elif isinstance(config, list):
        for i, item in enumerate(config):
            for result in iter_ids(item, path + [i]):
                yield result
            yield from iter_ids(item, path + [i])
    elif isinstance(config, dict):
        for key, value in config.items():
            for result in iter_ids(value, path + [key]):
                yield result
            yield from iter_ids(value, path + [key])


def do_id_pass(result):  # type: (Config) -> None
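Replacing the inner for result in iter_ids(...) loops with yield from is a pure Python 3 simplification; both forms re-yield every (id, path) pair from the recursive call. A minimal equivalence check (standalone sketch, not esphome code):

    def walk_old(tree, path=()):
        for i, item in enumerate(tree):
            if isinstance(item, list):
                for result in walk_old(item, path + (i,)):
                    yield result
            else:
                yield item, path + (i,)

    def walk_new(tree, path=()):
        for i, item in enumerate(tree):
            if isinstance(item, list):
                yield from walk_new(item, path + (i,))  # same behaviour, one level less nesting
            else:
                yield item, path + (i,)

    assert list(walk_old([1, [2, 3], 4])) == list(walk_new([1, [2, 3], 4]))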
@ -332,8 +321,8 @@ def do_id_pass(result): # type: (Config) -> None
|
||||
# Look for duplicate definitions
|
||||
match = next((v for v in declare_ids if v[0].id == id.id), None)
|
||||
if match is not None:
|
||||
opath = u'->'.join(text_type(v) for v in match[1])
|
||||
result.add_str_error(u"ID {} redefined! Check {}".format(id.id, opath), path)
|
||||
opath = '->'.join(str(v) for v in match[1])
|
||||
result.add_str_error(f"ID {id.id} redefined! Check {opath}", path)
|
||||
continue
|
||||
declare_ids.append((id, path))
|
||||
else:
|
||||
@ -357,8 +346,8 @@ def do_id_pass(result): # type: (Config) -> None
|
||||
# Find candidates
|
||||
matches = difflib.get_close_matches(id.id, [v[0].id for v in declare_ids])
|
||||
if matches:
|
||||
matches_s = ', '.join('"{}"'.format(x) for x in matches)
|
||||
error += " These IDs look similar: {}.".format(matches_s)
|
||||
matches_s = ', '.join(f'"{x}"' for x in matches)
|
||||
error += f" These IDs look similar: {matches_s}."
|
||||
result.add_str_error(error, path)
|
||||
continue
|
||||
if not isinstance(match.type, MockObjClass) or not isinstance(id.type, MockObjClass):
|
||||
@ -377,7 +366,7 @@ def do_id_pass(result): # type: (Config) -> None
|
||||
id.id = v[0].id
|
||||
break
|
||||
else:
|
||||
result.add_str_error("Couldn't resolve ID for type '{}'".format(id.type), path)
|
||||
result.add_str_error(f"Couldn't resolve ID for type '{id.type}'", path)
|
||||
|
||||
|
||||
def recursive_check_replaceme(value):
|
||||
@ -389,7 +378,7 @@ def recursive_check_replaceme(value):
|
||||
return cv.Schema({cv.valid: recursive_check_replaceme})(value)
|
||||
if isinstance(value, ESPForceValue):
|
||||
pass
|
||||
if isinstance(value, string_types) and value == 'REPLACEME':
|
||||
if isinstance(value, str) and value == 'REPLACEME':
|
||||
raise cv.Invalid("Found 'REPLACEME' in configuration, this is most likely an error. "
|
||||
"Please make sure you have replaced all fields from the sample "
|
||||
"configuration.\n"
|
||||
@ -455,8 +444,8 @@ def validate_config(config):
|
||||
|
||||
while load_queue:
|
||||
domain, conf = load_queue.popleft()
|
||||
domain = text_type(domain)
|
||||
if domain.startswith(u'.'):
|
||||
domain = str(domain)
|
||||
if domain.startswith('.'):
|
||||
# Ignore top-level keys starting with a dot
|
||||
continue
|
||||
result.add_output_path([domain], domain)
|
||||
@ -464,7 +453,7 @@ def validate_config(config):
|
||||
component = get_component(domain)
|
||||
path = [domain]
|
||||
if component is None:
|
||||
result.add_str_error(u"Component not found: {}".format(domain), path)
|
||||
result.add_str_error(f"Component not found: {domain}", path)
|
||||
continue
|
||||
CORE.loaded_integrations.add(domain)
|
||||
|
||||
@ -492,24 +481,24 @@ def validate_config(config):
|
||||
for i, p_config in enumerate(conf):
|
||||
path = [domain, i]
|
||||
# Construct temporary unknown output path
|
||||
p_domain = u'{}.unknown'.format(domain)
|
||||
p_domain = f'{domain}.unknown'
|
||||
result.add_output_path(path, p_domain)
|
||||
result[domain][i] = p_config
|
||||
if not isinstance(p_config, dict):
|
||||
result.add_str_error(u"Platform schemas must be key-value pairs.", path)
|
||||
result.add_str_error("Platform schemas must be key-value pairs.", path)
|
||||
continue
|
||||
p_name = p_config.get('platform')
|
||||
if p_name is None:
|
||||
result.add_str_error(u"No platform specified! See 'platform' key.", path)
|
||||
result.add_str_error("No platform specified! See 'platform' key.", path)
|
||||
continue
|
||||
# Remove temp output path and construct new one
|
||||
result.remove_output_path(path, p_domain)
|
||||
p_domain = u'{}.{}'.format(domain, p_name)
|
||||
p_domain = f'{domain}.{p_name}'
|
||||
result.add_output_path(path, p_domain)
|
||||
# Try Load platform
|
||||
platform = get_platform(domain, p_name)
|
||||
if platform is None:
|
||||
result.add_str_error(u"Platform not found: '{}'".format(p_domain), path)
|
||||
result.add_str_error(f"Platform not found: '{p_domain}'", path)
|
||||
continue
|
||||
CORE.loaded_integrations.add(p_name)
|
||||
|
||||
@ -537,8 +526,8 @@ def validate_config(config):
|
||||
success = True
|
||||
for dependency in comp.dependencies:
|
||||
if dependency not in config:
|
||||
result.add_str_error(u"Component {} requires component {}"
|
||||
u"".format(domain, dependency), path)
|
||||
result.add_str_error("Component {} requires component {}"
|
||||
"".format(domain, dependency), path)
|
||||
success = False
|
||||
if not success:
|
||||
continue
|
||||
@ -546,22 +535,22 @@ def validate_config(config):
|
||||
success = True
|
||||
for conflict in comp.conflicts_with:
|
||||
if conflict in config:
|
||||
result.add_str_error(u"Component {} cannot be used together with component {}"
|
||||
u"".format(domain, conflict), path)
|
||||
result.add_str_error("Component {} cannot be used together with component {}"
|
||||
"".format(domain, conflict), path)
|
||||
success = False
|
||||
if not success:
|
||||
continue
|
||||
|
||||
if CORE.esp_platform not in comp.esp_platforms:
|
||||
result.add_str_error(u"Component {} doesn't support {}.".format(domain,
|
||||
result.add_str_error("Component {} doesn't support {}.".format(domain,
|
||||
CORE.esp_platform),
|
||||
path)
|
||||
continue
|
||||
|
||||
if not comp.is_platform_component and comp.config_schema is None and \
|
||||
not isinstance(conf, core.AutoLoad):
|
||||
result.add_str_error(u"Component {} cannot be loaded via YAML "
|
||||
u"(no CONFIG_SCHEMA).".format(domain), path)
|
||||
result.add_str_error("Component {} cannot be loaded via YAML "
|
||||
"(no CONFIG_SCHEMA).".format(domain), path)
|
||||
continue
|
||||
|
||||
if comp.is_multi_conf:
|
||||
@ -611,13 +600,13 @@ def _nested_getitem(data, path):
|
||||
|
||||
|
||||
def humanize_error(config, validation_error):
|
||||
validation_error = text_type(validation_error)
|
||||
validation_error = str(validation_error)
|
||||
m = re.match(r'^(.*?)\s*(?:for dictionary value )?@ data\[.*$', validation_error, re.DOTALL)
|
||||
if m is not None:
|
||||
validation_error = m.group(1)
|
||||
validation_error = validation_error.strip()
|
||||
if not validation_error.endswith(u'.'):
|
||||
validation_error += u'.'
|
||||
if not validation_error.endswith('.'):
|
||||
validation_error += '.'
|
||||
return validation_error
|
||||
|
||||
|
||||
@ -634,22 +623,22 @@ def _get_parent_name(path, config):
|
||||
|
||||
|
||||
def _format_vol_invalid(ex, config):
|
||||
# type: (vol.Invalid, Config) -> unicode
|
||||
message = u''
|
||||
# type: (vol.Invalid, Config) -> str
|
||||
message = ''
|
||||
|
||||
paren = _get_parent_name(ex.path[:-1], config)
|
||||
|
||||
if isinstance(ex, ExtraKeysInvalid):
|
||||
if ex.candidates:
|
||||
message += u'[{}] is an invalid option for [{}]. Did you mean {}?'.format(
|
||||
ex.path[-1], paren, u', '.join(u'[{}]'.format(x) for x in ex.candidates))
|
||||
message += '[{}] is an invalid option for [{}]. Did you mean {}?'.format(
|
||||
ex.path[-1], paren, ', '.join(f'[{x}]' for x in ex.candidates))
|
||||
else:
|
||||
message += u'[{}] is an invalid option for [{}]. Please check the indentation.'.format(
|
||||
message += '[{}] is an invalid option for [{}]. Please check the indentation.'.format(
|
||||
ex.path[-1], paren)
|
||||
elif u'extra keys not allowed' in text_type(ex):
|
||||
message += u'[{}] is an invalid option for [{}].'.format(ex.path[-1], paren)
|
||||
elif u'required key not provided' in text_type(ex):
|
||||
message += u"'{}' is a required option for [{}].".format(ex.path[-1], paren)
|
||||
elif 'extra keys not allowed' in str(ex):
|
||||
message += '[{}] is an invalid option for [{}].'.format(ex.path[-1], paren)
|
||||
elif 'required key not provided' in str(ex):
|
||||
message += "'{}' is a required option for [{}].".format(ex.path[-1], paren)
|
||||
else:
|
||||
message += humanize_error(config, ex)
|
||||
|
||||
@ -662,9 +651,8 @@ class InvalidYAMLError(EsphomeError):
|
||||
base = str(base_exc)
|
||||
except UnicodeDecodeError:
|
||||
base = repr(base_exc)
|
||||
base = decode_text(base)
|
||||
message = u"Invalid YAML syntax:\n\n{}".format(base)
|
||||
super(InvalidYAMLError, self).__init__(message)
|
||||
message = f"Invalid YAML syntax:\n\n{base}"
|
||||
super().__init__(message)
|
||||
self.base_exc = base_exc
|
||||
|
||||
|
||||
@ -680,7 +668,7 @@ def _load_config():
|
||||
except EsphomeError:
|
||||
raise
|
||||
except Exception:
|
||||
_LOGGER.error(u"Unexpected exception while reading configuration:")
|
||||
_LOGGER.error("Unexpected exception while reading configuration:")
|
||||
raise
|
||||
|
||||
return result
|
||||
@ -690,7 +678,7 @@ def load_config():
|
||||
try:
|
||||
return _load_config()
|
||||
except vol.Invalid as err:
|
||||
raise EsphomeError("Error while parsing config: {}".format(err))
|
||||
raise EsphomeError(f"Error while parsing config: {err}")
|
||||
|
||||
|
||||
def line_info(obj, highlight=True):
|
||||
@ -699,7 +687,7 @@ def line_info(obj, highlight=True):
|
||||
return None
|
||||
if isinstance(obj, ESPHomeDataBase) and obj.esp_range is not None:
|
||||
mark = obj.esp_range.start_mark
|
||||
source = u"[source {}:{}]".format(mark.document, mark.line + 1)
|
||||
source = "[source {}:{}]".format(mark.document, mark.line + 1)
|
||||
return color('cyan', source)
|
||||
return None
|
||||
|
||||
@ -715,82 +703,82 @@ def _print_on_next_line(obj):
|
||||
|
||||
|
||||
def dump_dict(config, path, at_root=True):
|
||||
# type: (Config, ConfigPath, bool) -> Tuple[unicode, bool]
|
||||
# type: (Config, ConfigPath, bool) -> Tuple[str, bool]
|
||||
conf = config.get_nested_item(path)
|
||||
ret = u''
|
||||
ret = ''
|
||||
multiline = False
|
||||
|
||||
if at_root:
|
||||
error = config.get_error_for_path(path)
|
||||
if error is not None:
|
||||
ret += u'\n' + color('bold_red', _format_vol_invalid(error, config)) + u'\n'
|
||||
ret += '\n' + color('bold_red', _format_vol_invalid(error, config)) + '\n'
|
||||
|
||||
if isinstance(conf, (list, tuple)):
|
||||
multiline = True
|
||||
if not conf:
|
||||
ret += u'[]'
|
||||
ret += '[]'
|
||||
multiline = False
|
||||
|
||||
for i in range(len(conf)):
|
||||
path_ = path + [i]
|
||||
error = config.get_error_for_path(path_)
|
||||
if error is not None:
|
||||
ret += u'\n' + color('bold_red', _format_vol_invalid(error, config)) + u'\n'
|
||||
ret += '\n' + color('bold_red', _format_vol_invalid(error, config)) + '\n'
|
||||
|
||||
sep = u'- '
|
||||
sep = '- '
|
||||
if config.is_in_error_path(path_):
|
||||
sep = color('red', sep)
|
||||
msg, _ = dump_dict(config, path_, at_root=False)
|
||||
msg = indent(msg)
|
||||
inf = line_info(config.get_nested_item(path_), highlight=config.is_in_error_path(path_))
|
||||
if inf is not None:
|
||||
msg = inf + u'\n' + msg
|
||||
msg = inf + '\n' + msg
|
||||
elif msg:
|
||||
msg = msg[2:]
|
||||
ret += sep + msg + u'\n'
|
||||
ret += sep + msg + '\n'
|
||||
elif isinstance(conf, dict):
|
||||
multiline = True
|
||||
if not conf:
|
||||
ret += u'{}'
|
||||
ret += '{}'
|
||||
multiline = False
|
||||
|
||||
for k in conf.keys():
|
||||
path_ = path + [k]
|
||||
error = config.get_error_for_path(path_)
|
||||
if error is not None:
|
||||
ret += u'\n' + color('bold_red', _format_vol_invalid(error, config)) + u'\n'
|
||||
ret += '\n' + color('bold_red', _format_vol_invalid(error, config)) + '\n'
|
||||
|
||||
st = u'{}: '.format(k)
|
||||
st = f'{k}: '
|
||||
if config.is_in_error_path(path_):
|
||||
st = color('red', st)
|
||||
msg, m = dump_dict(config, path_, at_root=False)
|
||||
|
||||
inf = line_info(config.get_nested_item(path_), highlight=config.is_in_error_path(path_))
|
||||
if m:
|
||||
msg = u'\n' + indent(msg)
|
||||
msg = '\n' + indent(msg)
|
||||
|
||||
if inf is not None:
|
||||
if m:
|
||||
msg = u' ' + inf + msg
|
||||
msg = ' ' + inf + msg
|
||||
else:
|
||||
msg = msg + u' ' + inf
|
||||
ret += st + msg + u'\n'
|
||||
msg = msg + ' ' + inf
|
||||
ret += st + msg + '\n'
|
||||
elif isinstance(conf, str):
|
||||
if is_secret(conf):
|
||||
conf = u'!secret {}'.format(is_secret(conf))
|
||||
conf = '!secret {}'.format(is_secret(conf))
|
||||
if not conf:
|
||||
conf += u"''"
|
||||
conf += "''"
|
||||
|
||||
if len(conf) > 80:
|
||||
conf = u'|-\n' + indent(conf)
|
||||
conf = '|-\n' + indent(conf)
|
||||
error = config.get_error_for_path(path)
|
||||
col = 'bold_red' if error else 'white'
|
||||
ret += color(col, text_type(conf))
|
||||
ret += color(col, str(conf))
|
||||
elif isinstance(conf, core.Lambda):
|
||||
if is_secret(conf):
|
||||
conf = u'!secret {}'.format(is_secret(conf))
|
||||
conf = '!secret {}'.format(is_secret(conf))
|
||||
|
||||
conf = u'!lambda |-\n' + indent(text_type(conf.value))
|
||||
conf = '!lambda |-\n' + indent(str(conf.value))
|
||||
error = config.get_error_for_path(path)
|
||||
col = 'bold_red' if error else 'white'
|
||||
ret += color(col, conf)
|
||||
@ -799,8 +787,8 @@ def dump_dict(config, path, at_root=True):
|
||||
else:
|
||||
error = config.get_error_for_path(path)
|
||||
col = 'bold_red' if error else 'white'
|
||||
ret += color(col, text_type(conf))
|
||||
multiline = u'\n' in ret
|
||||
ret += color(col, str(conf))
|
||||
multiline = '\n' in ret
|
||||
|
||||
return ret, multiline
|
||||
|
||||
@ -830,20 +818,20 @@ def read_config():
|
||||
try:
|
||||
res = load_config()
|
||||
except EsphomeError as err:
|
||||
_LOGGER.error(u"Error while reading config: %s", err)
|
||||
_LOGGER.error("Error while reading config: %s", err)
|
||||
return None
|
||||
if res.errors:
|
||||
if not CORE.verbose:
|
||||
res = strip_default_ids(res)
|
||||
|
||||
safe_print(color('bold_red', u"Failed config"))
|
||||
safe_print(color('bold_red', "Failed config"))
|
||||
safe_print('')
|
||||
for path, domain in res.output_paths:
|
||||
if not res.is_in_error_path(path):
|
||||
continue
|
||||
|
||||
safe_print(color('bold_red', u'{}:'.format(domain)) + u' ' +
|
||||
(line_info(res.get_nested_item(path)) or u''))
|
||||
safe_print(color('bold_red', f'{domain}:') + ' ' +
|
||||
(line_info(res.get_nested_item(path)) or ''))
|
||||
safe_print(indent(dump_dict(res, path)[0]))
|
||||
return None
|
||||
return OrderedDict(res)
|
||||
|
@ -1,22 +1,19 @@
from __future__ import print_function

import json
import os

from esphome.core import CORE
from esphome.helpers import read_file
from esphome.py_compat import safe_input


def read_config_file(path):
    # type: (basestring) -> unicode
    # type: (str) -> str
    if CORE.vscode and (not CORE.ace or
                        os.path.abspath(path) == os.path.abspath(CORE.config_path)):
        print(json.dumps({
            'type': 'read_file',
            'path': path,
        }))
        data = json.loads(safe_input())
        data = json.loads(input())
        assert data['type'] == 'file_response'
        return data['content']

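read_config_file keeps its small JSON handshake with the editor integration; only safe_input() becomes the built-in input(). Roughly, the exchange looks like this (a sketch of the protocol, not an exact transcript):

    import json

    # ESPHome prints a request line on stdout:
    request = json.dumps({'type': 'read_file', 'path': '/config/livingroom.yaml'})

    # The editor replies with one JSON line on stdin, which input() now reads:
    reply = json.dumps({'type': 'file_response', 'content': 'esphome:\n  name: livingroom\n'})
    data = json.loads(reply)
    assert data['type'] == 'file_response'
    print(data['content'])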
@ -1,6 +1,4 @@
# coding=utf-8
"""Helpers for config validation using voluptuous."""
from __future__ import print_function

import logging
import os
@ -20,7 +18,6 @@ from esphome.const import CONF_AVAILABILITY, CONF_COMMAND_TOPIC, CONF_DISCOVERY,
from esphome.core import CORE, HexInt, IPAddress, Lambda, TimePeriod, TimePeriodMicroseconds, \
    TimePeriodMilliseconds, TimePeriodSeconds, TimePeriodMinutes
from esphome.helpers import list_starts_with, add_class_to_obj
from esphome.py_compat import integer_types, string_types, text_type, IS_PY2, decode_text
from esphome.voluptuous_schema import _Schema

_LOGGER = logging.getLogger(__name__)
@ -43,7 +40,7 @@ ALLOW_EXTRA = vol.ALLOW_EXTRA
UNDEFINED = vol.UNDEFINED
RequiredFieldInvalid = vol.RequiredFieldInvalid

ALLOWED_NAME_CHARS = u'abcdefghijklmnopqrstuvwxyz0123456789_'
ALLOWED_NAME_CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789_'

RESERVED_IDS = [
    # C++ keywords http://en.cppreference.com/w/cpp/keyword
@ -82,7 +79,7 @@ class Optional(vol.Optional):
|
||||
"""
|
||||
|
||||
def __init__(self, key, default=UNDEFINED):
|
||||
super(Optional, self).__init__(key, default=default)
|
||||
super().__init__(key, default=default)
|
||||
|
||||
|
||||
class Required(vol.Required):
|
||||
@ -94,7 +91,7 @@ class Required(vol.Required):
|
||||
"""
|
||||
|
||||
def __init__(self, key):
|
||||
super(Required, self).__init__(key)
|
||||
super().__init__(key)
|
||||
|
||||
|
||||
def check_not_templatable(value):
|
||||
@ -105,7 +102,7 @@ def check_not_templatable(value):
|
||||
def alphanumeric(value):
|
||||
if value is None:
|
||||
raise Invalid("string value is None")
|
||||
value = text_type(value)
|
||||
value = str(value)
|
||||
if not value.isalnum():
|
||||
raise Invalid("string value is not alphanumeric")
|
||||
return value
|
||||
@ -115,8 +112,8 @@ def valid_name(value):
|
||||
value = string_strict(value)
|
||||
for c in value:
|
||||
if c not in ALLOWED_NAME_CHARS:
|
||||
raise Invalid(u"'{}' is an invalid character for names. Valid characters are: {}"
|
||||
u" (lowercase, no spaces)".format(c, ALLOWED_NAME_CHARS))
|
||||
raise Invalid(f"'{c}' is an invalid character for names. Valid characters are: "
|
||||
f"{ALLOWED_NAME_CHARS} (lowercase, no spaces)")
|
||||
return value
|
||||
|
||||
|
||||
@ -131,10 +128,10 @@ def string(value):
|
||||
raise Invalid("string value cannot be dictionary or list.")
|
||||
if isinstance(value, bool):
|
||||
raise Invalid("Auto-converted this value to boolean, please wrap the value in quotes.")
|
||||
if isinstance(value, text_type):
|
||||
if isinstance(value, str):
|
||||
return value
|
||||
if value is not None:
|
||||
return text_type(value)
|
||||
return str(value)
|
||||
raise Invalid("string value is None")
|
||||
|
||||
|
||||
@ -142,10 +139,8 @@ def string_strict(value):
|
||||
"""Like string, but only allows strings, and does not automatically convert other types to
|
||||
strings."""
|
||||
check_not_templatable(value)
|
||||
if isinstance(value, text_type):
|
||||
if isinstance(value, str):
|
||||
return value
|
||||
if isinstance(value, string_types):
|
||||
return text_type(value)
|
||||
raise Invalid("Must be string, got {}. did you forget putting quotes "
|
||||
"around the value?".format(type(value)))
|
||||
|
||||
@ -172,14 +167,14 @@ def boolean(value):
|
||||
check_not_templatable(value)
|
||||
if isinstance(value, bool):
|
||||
return value
|
||||
if isinstance(value, string_types):
|
||||
if isinstance(value, str):
|
||||
value = value.lower()
|
||||
if value in ('true', 'yes', 'on', 'enable'):
|
||||
return True
|
||||
if value in ('false', 'no', 'off', 'disable'):
|
||||
return False
|
||||
raise Invalid(u"Expected boolean value, but cannot convert {} to a boolean. "
|
||||
u"Please use 'true' or 'false'".format(value))
|
||||
raise Invalid("Expected boolean value, but cannot convert {} to a boolean. "
|
||||
"Please use 'true' or 'false'".format(value))
|
||||
|
||||
|
||||
def ensure_list(*validators):
|
||||
@ -228,7 +223,7 @@ def int_(value):
|
||||
Automatically also converts strings to ints.
|
||||
"""
|
||||
check_not_templatable(value)
|
||||
if isinstance(value, integer_types):
|
||||
if isinstance(value, int):
|
||||
return value
|
||||
if isinstance(value, float):
|
||||
if int(value) == value:
|
||||
@ -242,15 +237,15 @@ def int_(value):
|
||||
try:
|
||||
return int(value, base)
|
||||
except ValueError:
|
||||
raise Invalid(u"Expected integer, but cannot parse {} as an integer".format(value))
|
||||
raise Invalid(f"Expected integer, but cannot parse {value} as an integer")
|
||||
|
||||
|
||||
def int_range(min=None, max=None, min_included=True, max_included=True):
|
||||
"""Validate that the config option is an integer in the given range."""
|
||||
if min is not None:
|
||||
assert isinstance(min, integer_types)
|
||||
assert isinstance(min, int)
|
||||
if max is not None:
|
||||
assert isinstance(max, integer_types)
|
||||
assert isinstance(max, int)
|
||||
return All(int_, Range(min=min, max=max, min_included=min_included, max_included=max_included))
|
||||
|
||||
|
||||
@ -291,14 +286,14 @@ def validate_id_name(value):
|
||||
valid_chars = ascii_letters + digits + '_'
|
||||
for char in value:
|
||||
if char not in valid_chars:
|
||||
raise Invalid(u"IDs must only consist of upper/lowercase characters, the underscore"
|
||||
u"character and numbers. The character '{}' cannot be used"
|
||||
u"".format(char))
|
||||
raise Invalid("IDs must only consist of upper/lowercase characters, the underscore"
|
||||
"character and numbers. The character '{}' cannot be used"
|
||||
"".format(char))
|
||||
if value in RESERVED_IDS:
|
||||
raise Invalid(u"ID '{}' is reserved internally and cannot be used".format(value))
|
||||
raise Invalid(f"ID '{value}' is reserved internally and cannot be used")
|
||||
if value in CORE.loaded_integrations:
|
||||
raise Invalid(u"ID '{}' conflicts with the name of an esphome integration, please use "
|
||||
u"another ID name.".format(value))
|
||||
raise Invalid("ID '{}' conflicts with the name of an esphome integration, please use "
|
||||
"another ID name.".format(value))
|
||||
return value
|
||||
|
||||
|
||||
@ -358,7 +353,7 @@ def only_on(platforms):
|
||||
|
||||
def validator_(obj):
|
||||
if CORE.esp_platform not in platforms:
|
||||
raise Invalid(u"This feature is only available on {}".format(platforms))
|
||||
raise Invalid(f"This feature is only available on {platforms}")
|
||||
return obj
|
||||
|
||||
return validator_
|
||||
@ -463,7 +458,7 @@ def time_period_str_unit(value):
|
||||
"'{0}s'?".format(value))
|
||||
if isinstance(value, TimePeriod):
|
||||
value = str(value)
|
||||
if not isinstance(value, string_types):
|
||||
if not isinstance(value, str):
|
||||
raise Invalid("Expected string for time period with unit.")
|
||||
|
||||
unit_to_kwarg = {
|
||||
@ -485,8 +480,8 @@ def time_period_str_unit(value):
|
||||
match = re.match(r"^([-+]?[0-9]*\.?[0-9]*)\s*(\w*)$", value)
|
||||
|
||||
if match is None:
|
||||
raise Invalid(u"Expected time period with unit, "
|
||||
u"got {}".format(value))
|
||||
raise Invalid("Expected time period with unit, "
|
||||
"got {}".format(value))
|
||||
kwarg = unit_to_kwarg[one_of(*unit_to_kwarg)(match.group(2))]
|
||||
|
||||
return TimePeriod(**{kwarg: float(match.group(1))})
|
||||
@ -545,7 +540,7 @@ def time_of_day(value):
|
||||
try:
|
||||
date = datetime.strptime(value, '%H:%M:%S %p')
|
||||
except ValueError:
|
||||
raise Invalid("Invalid time of day: {}".format(err))
|
||||
raise Invalid(f"Invalid time of day: {err}")
|
||||
|
||||
return {
|
||||
CONF_HOUR: date.hour,
|
||||
@ -577,7 +572,7 @@ def uuid(value):
|
||||
|
||||
METRIC_SUFFIXES = {
|
||||
'E': 1e18, 'P': 1e15, 'T': 1e12, 'G': 1e9, 'M': 1e6, 'k': 1e3, 'da': 10, 'd': 1e-1,
|
||||
'c': 1e-2, 'm': 0.001, u'µ': 1e-6, 'u': 1e-6, 'n': 1e-9, 'p': 1e-12, 'f': 1e-15, 'a': 1e-18,
|
||||
'c': 1e-2, 'm': 0.001, 'µ': 1e-6, 'u': 1e-6, 'n': 1e-9, 'p': 1e-12, 'f': 1e-15, 'a': 1e-18,
|
||||
'': 1
|
||||
}
|
||||
|
||||
@ -594,11 +589,11 @@ def float_with_unit(quantity, regex_suffix, optional_unit=False):
|
||||
match = pattern.match(string(value))
|
||||
|
||||
if match is None:
|
||||
raise Invalid(u"Expected {} with unit, got {}".format(quantity, value))
|
||||
raise Invalid(f"Expected {quantity} with unit, got {value}")
|
||||
|
||||
mantissa = float(match.group(1))
|
||||
if match.group(2) not in METRIC_SUFFIXES:
|
||||
raise Invalid(u"Invalid {} suffix {}".format(quantity, match.group(2)))
|
||||
raise Invalid("Invalid {} suffix {}".format(quantity, match.group(2)))
|
||||
|
||||
multiplier = METRIC_SUFFIXES[match.group(2)]
|
||||
return mantissa * multiplier
|
||||
@ -606,30 +601,17 @@ def float_with_unit(quantity, regex_suffix, optional_unit=False):
    return validator


frequency = float_with_unit("frequency", u"(Hz|HZ|hz)?")
resistance = float_with_unit("resistance", u"(Ω|Ω|ohm|Ohm|OHM)?")
current = float_with_unit("current", u"(a|A|amp|Amp|amps|Amps|ampere|Ampere)?")
voltage = float_with_unit("voltage", u"(v|V|volt|Volts)?")
distance = float_with_unit("distance", u"(m)")
framerate = float_with_unit("framerate", u"(FPS|fps|Fps|FpS|Hz)")
angle = float_with_unit("angle", u"(°|deg)", optional_unit=True)
_temperature_c = float_with_unit("temperature", u"(°C|° C|°|C)?")
_temperature_k = float_with_unit("temperature", u"(° K|° K|K)?")
_temperature_f = float_with_unit("temperature", u"(°F|° F|F)?")
decibel = float_with_unit("decibel", u"(dB|dBm|db|dbm)", optional_unit=True)

if IS_PY2:
    # Override voluptuous invalid to unicode for py2
    def _vol_invalid_unicode(self):
        path = u' @ data[%s]' % u']['.join(map(repr, self.path)) \
            if self.path else u''
        # pylint: disable=no-member
        output = decode_text(self.message)
        if self.error_type:
            output += u' for ' + self.error_type
        return output + path

    Invalid.__unicode__ = _vol_invalid_unicode
frequency = float_with_unit("frequency", "(Hz|HZ|hz)?")
resistance = float_with_unit("resistance", "(Ω|Ω|ohm|Ohm|OHM)?")
current = float_with_unit("current", "(a|A|amp|Amp|amps|Amps|ampere|Ampere)?")
voltage = float_with_unit("voltage", "(v|V|volt|Volts)?")
distance = float_with_unit("distance", "(m)")
framerate = float_with_unit("framerate", "(FPS|fps|Fps|FpS|Hz)")
angle = float_with_unit("angle", "(°|deg)", optional_unit=True)
_temperature_c = float_with_unit("temperature", "(°C|° C|°|C)?")
_temperature_k = float_with_unit("temperature", "(° K|° K|K)?")
_temperature_f = float_with_unit("temperature", "(°F|° F|F)?")
decibel = float_with_unit("decibel", "(dB|dBm|db|dbm)", optional_unit=True)


def temperature(value):
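With the Python 2 Invalid.__unicode__ patch gone, the unit validators are plain module-level definitions. They still accept a number with an optional metric prefix and unit; a rough usage sketch (outputs inferred from float_with_unit and METRIC_SUFFIXES above, so treat them as assumptions):

    import esphome.config_validation as cv

    print(cv.frequency('50Hz'))    # 50.0
    print(cv.frequency('20kHz'))   # 20000.0 - the 'k' multiplier comes from METRIC_SUFFIXES
    print(cv.voltage('3.3V'))      # 3.3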
@ -672,15 +654,15 @@ def validate_bytes(value):
|
||||
match = re.match(r"^([0-9]+)\s*(\w*?)(?:byte|B|b)?s?$", value)
|
||||
|
||||
if match is None:
|
||||
raise Invalid(u"Expected number of bytes with unit, got {}".format(value))
|
||||
raise Invalid(f"Expected number of bytes with unit, got {value}")
|
||||
|
||||
mantissa = int(match.group(1))
|
||||
if match.group(2) not in METRIC_SUFFIXES:
|
||||
raise Invalid(u"Invalid metric suffix {}".format(match.group(2)))
|
||||
raise Invalid("Invalid metric suffix {}".format(match.group(2)))
|
||||
multiplier = METRIC_SUFFIXES[match.group(2)]
|
||||
if multiplier < 1:
|
||||
raise Invalid(u"Only suffixes with positive exponents are supported. "
|
||||
u"Got {}".format(match.group(2)))
|
||||
raise Invalid("Only suffixes with positive exponents are supported. "
|
||||
"Got {}".format(match.group(2)))
|
||||
return int(mantissa * multiplier)
|
||||
|
||||
|
||||
@ -701,7 +683,7 @@ def domain(value):
|
||||
try:
|
||||
return str(ipv4(value))
|
||||
except Invalid:
|
||||
raise Invalid("Invalid domain: {}".format(value))
|
||||
raise Invalid(f"Invalid domain: {value}")
|
||||
|
||||
|
||||
def domain_name(value):
|
||||
@ -730,7 +712,7 @@ def ssid(value):
|
||||
def ipv4(value):
|
||||
if isinstance(value, list):
|
||||
parts = value
|
||||
elif isinstance(value, string_types):
|
||||
elif isinstance(value, str):
|
||||
parts = value.split('.')
|
||||
elif isinstance(value, IPAddress):
|
||||
return value
|
||||
@ -806,7 +788,7 @@ def mqtt_qos(value):
|
||||
try:
|
||||
value = int(value)
|
||||
except (TypeError, ValueError):
|
||||
raise Invalid(u"MQTT Quality of Service must be integer, got {}".format(value))
|
||||
raise Invalid(f"MQTT Quality of Service must be integer, got {value}")
|
||||
return one_of(0, 1, 2)(value)
|
||||
|
||||
|
||||
@ -814,7 +796,7 @@ def requires_component(comp):
|
||||
"""Validate that this option can only be specified when the component `comp` is loaded."""
|
||||
def validator(value):
|
||||
if comp not in CORE.raw_config:
|
||||
raise Invalid("This option requires component {}".format(comp))
|
||||
raise Invalid(f"This option requires component {comp}")
|
||||
return value
|
||||
|
||||
return validator
|
||||
@ -839,7 +821,7 @@ def percentage(value):
|
||||
|
||||
|
||||
def possibly_negative_percentage(value):
|
||||
has_percent_sign = isinstance(value, string_types) and value.endswith('%')
|
||||
has_percent_sign = isinstance(value, str) and value.endswith('%')
|
||||
if has_percent_sign:
|
||||
value = float(value[:-1].rstrip()) / 100.0
|
||||
if value > 1:
|
||||
@ -856,7 +838,7 @@ def possibly_negative_percentage(value):
|
||||
|
||||
|
||||
def percentage_int(value):
|
||||
if isinstance(value, string_types) and value.endswith('%'):
|
||||
if isinstance(value, str) and value.endswith('%'):
|
||||
value = int(value[:-1].rstrip())
|
||||
return value
|
||||
|
||||
@ -916,7 +898,7 @@ def one_of(*values, **kwargs):
|
||||
- *float* (``bool``, default=False): Whether to convert the incoming values to floats.
|
||||
- *space* (``str``, default=' '): What to convert spaces in the input string to.
|
||||
"""
|
||||
options = u', '.join(u"'{}'".format(x) for x in values)
|
||||
options = ', '.join(f"'{x}'" for x in values)
|
||||
lower = kwargs.pop('lower', False)
|
||||
upper = kwargs.pop('upper', False)
|
||||
string_ = kwargs.pop('string', False) or lower or upper
|
||||
@ -940,13 +922,13 @@ def one_of(*values, **kwargs):
|
||||
value = Upper(value)
|
||||
if value not in values:
|
||||
import difflib
|
||||
options_ = [text_type(x) for x in values]
|
||||
option = text_type(value)
|
||||
options_ = [str(x) for x in values]
|
||||
option = str(value)
|
||||
matches = difflib.get_close_matches(option, options_)
|
||||
if matches:
|
||||
raise Invalid(u"Unknown value '{}', did you mean {}?"
|
||||
u"".format(value, u", ".join(u"'{}'".format(x) for x in matches)))
|
||||
raise Invalid(u"Unknown value '{}', valid options are {}.".format(value, options))
|
||||
raise Invalid("Unknown value '{}', did you mean {}?"
|
||||
"".format(value, ", ".join(f"'{x}'" for x in matches)))
|
||||
raise Invalid(f"Unknown value '{value}', valid options are {options}.")
|
||||
return value
|
||||
|
||||
return validator
|
||||
@ -996,7 +978,7 @@ def returning_lambda(value):
|
||||
Additionally, make sure the lambda returns something.
|
||||
"""
|
||||
value = lambda_(value)
|
||||
if u'return' not in value.value:
|
||||
if 'return' not in value.value:
|
||||
raise Invalid("Lambda doesn't contain a 'return' statement, but the lambda "
|
||||
"is expected to return a value. \n"
|
||||
"Please make sure the lambda contains at least one "
|
||||
@ -1007,24 +989,23 @@ def returning_lambda(value):
|
||||
def dimensions(value):
|
||||
if isinstance(value, list):
|
||||
if len(value) != 2:
|
||||
raise Invalid(u"Dimensions must have a length of two, not {}".format(len(value)))
|
||||
raise Invalid("Dimensions must have a length of two, not {}".format(len(value)))
|
||||
try:
|
||||
width, height = int(value[0]), int(value[1])
|
||||
except ValueError:
|
||||
raise Invalid(u"Width and height dimensions must be integers")
|
||||
raise Invalid("Width and height dimensions must be integers")
|
||||
if width <= 0 or height <= 0:
|
||||
raise Invalid(u"Width and height must at least be 1")
|
||||
raise Invalid("Width and height must at least be 1")
|
||||
return [width, height]
|
||||
value = string(value)
|
||||
match = re.match(r"\s*([0-9]+)\s*[xX]\s*([0-9]+)\s*", value)
|
||||
if not match:
|
||||
raise Invalid(u"Invalid value '{}' for dimensions. Only WIDTHxHEIGHT is allowed.")
|
||||
raise Invalid("Invalid value '{}' for dimensions. Only WIDTHxHEIGHT is allowed.")
|
||||
return dimensions([match.group(1), match.group(2)])
|
||||
|
||||
|
||||
def directory(value):
|
||||
import json
|
||||
from esphome.py_compat import safe_input
|
||||
value = string(value)
|
||||
path = CORE.relative_config_path(value)
|
||||
|
||||
@ -1034,25 +1015,24 @@ def directory(value):
|
||||
'type': 'check_directory_exists',
|
||||
'path': path,
|
||||
}))
|
||||
data = json.loads(safe_input())
|
||||
data = json.loads(input())
|
||||
assert data['type'] == 'directory_exists_response'
|
||||
if data['content']:
|
||||
return value
|
||||
raise Invalid(u"Could not find directory '{}'. Please make sure it exists (full path: {})."
|
||||
u"".format(path, os.path.abspath(path)))
|
||||
raise Invalid("Could not find directory '{}'. Please make sure it exists (full path: {})."
|
||||
"".format(path, os.path.abspath(path)))
|
||||
|
||||
if not os.path.exists(path):
|
||||
raise Invalid(u"Could not find directory '{}'. Please make sure it exists (full path: {})."
|
||||
u"".format(path, os.path.abspath(path)))
|
||||
raise Invalid("Could not find directory '{}'. Please make sure it exists (full path: {})."
|
||||
"".format(path, os.path.abspath(path)))
|
||||
if not os.path.isdir(path):
|
||||
raise Invalid(u"Path '{}' is not a directory (full path: {})."
|
||||
u"".format(path, os.path.abspath(path)))
|
||||
raise Invalid("Path '{}' is not a directory (full path: {})."
|
||||
"".format(path, os.path.abspath(path)))
|
||||
return value
|
||||
|
||||
|
||||
def file_(value):
|
||||
import json
|
||||
from esphome.py_compat import safe_input
|
||||
value = string(value)
|
||||
path = CORE.relative_config_path(value)
|
||||
|
||||
@ -1062,19 +1042,19 @@ def file_(value):
|
||||
'type': 'check_file_exists',
|
||||
'path': path,
|
||||
}))
|
||||
data = json.loads(safe_input())
|
||||
data = json.loads(input())
|
||||
assert data['type'] == 'file_exists_response'
|
||||
if data['content']:
|
||||
return value
|
||||
raise Invalid(u"Could not find file '{}'. Please make sure it exists (full path: {})."
|
||||
u"".format(path, os.path.abspath(path)))
|
||||
raise Invalid("Could not find file '{}'. Please make sure it exists (full path: {})."
|
||||
"".format(path, os.path.abspath(path)))
|
||||
|
||||
if not os.path.exists(path):
|
||||
raise Invalid(u"Could not find file '{}'. Please make sure it exists (full path: {})."
|
||||
u"".format(path, os.path.abspath(path)))
|
||||
raise Invalid("Could not find file '{}'. Please make sure it exists (full path: {})."
|
||||
"".format(path, os.path.abspath(path)))
|
||||
if not os.path.isfile(path):
|
||||
raise Invalid(u"Path '{}' is not a file (full path: {})."
|
||||
u"".format(path, os.path.abspath(path)))
|
||||
raise Invalid("Path '{}' is not a file (full path: {})."
|
||||
"".format(path, os.path.abspath(path)))
|
||||
return value
|
||||
|
||||
|
||||
@ -1092,7 +1072,7 @@ def entity_id(value):
|
||||
for x in value.split('.'):
|
||||
for c in x:
|
||||
if c not in ENTITY_ID_CHARACTERS:
|
||||
raise Invalid("Invalid character for entity ID: {}".format(c))
|
||||
raise Invalid(f"Invalid character for entity ID: {c}")
|
||||
return value
|
||||
|
||||
|
||||
@ -1103,9 +1083,9 @@ def extract_keys(schema):
|
||||
assert isinstance(schema, dict)
|
||||
keys = []
|
||||
for skey in list(schema.keys()):
|
||||
if isinstance(skey, string_types):
|
||||
if isinstance(skey, str):
|
||||
keys.append(skey)
|
||||
elif isinstance(skey, vol.Marker) and isinstance(skey.schema, string_types):
|
||||
elif isinstance(skey, vol.Marker) and isinstance(skey.schema, str):
|
||||
keys.append(skey.schema)
|
||||
else:
|
||||
raise ValueError()
|
||||
@ -1136,14 +1116,14 @@ class GenerateID(Optional):
|
||||
"""Mark this key as being an auto-generated ID key."""
|
||||
|
||||
def __init__(self, key=CONF_ID):
|
||||
super(GenerateID, self).__init__(key, default=lambda: None)
|
||||
super().__init__(key, default=lambda: None)
|
||||
|
||||
|
||||
class SplitDefault(Optional):
|
||||
"""Mark this key to have a split default for ESP8266/ESP32."""
|
||||
|
||||
def __init__(self, key, esp8266=vol.UNDEFINED, esp32=vol.UNDEFINED):
|
||||
super(SplitDefault, self).__init__(key)
|
||||
super().__init__(key)
|
||||
self._esp8266_default = vol.default_factory(esp8266)
|
||||
self._esp32_default = vol.default_factory(esp32)
|
||||
|
||||
@ -1165,7 +1145,7 @@ class OnlyWith(Optional):
|
||||
"""Set the default value only if the given component is loaded."""
|
||||
|
||||
def __init__(self, key, component, default=None):
|
||||
super(OnlyWith, self).__init__(key)
|
||||
super().__init__(key)
|
||||
self._component = component
|
||||
self._default = vol.default_factory(default)
|
||||
|
||||
@ -1207,21 +1187,21 @@ def validate_registry_entry(name, registry):
|
||||
ignore_keys = extract_keys(base_schema)
|
||||
|
||||
def validator(value):
|
||||
if isinstance(value, string_types):
|
||||
if isinstance(value, str):
|
||||
value = {value: {}}
|
||||
if not isinstance(value, dict):
|
||||
raise Invalid(u"{} must consist of key-value mapping! Got {}"
|
||||
u"".format(name.title(), value))
|
||||
raise Invalid("{} must consist of key-value mapping! Got {}"
|
||||
"".format(name.title(), value))
|
||||
key = next((x for x in value if x not in ignore_keys), None)
|
||||
if key is None:
|
||||
raise Invalid(u"Key missing from {}! Got {}".format(name, value))
|
||||
raise Invalid(f"Key missing from {name}! Got {value}")
|
||||
if key not in registry:
|
||||
raise Invalid(u"Unable to find {} with the name '{}'".format(name, key), [key])
|
||||
raise Invalid(f"Unable to find {name} with the name '{key}'", [key])
|
||||
key2 = next((x for x in value if x != key and x not in ignore_keys), None)
|
||||
if key2 is not None:
|
||||
raise Invalid(u"Cannot have two {0}s in one item. Key '{1}' overrides '{2}'! "
|
||||
u"Did you forget to indent the block inside the {0}?"
|
||||
u"".format(name, key, key2))
|
||||
raise Invalid("Cannot have two {0}s in one item. Key '{1}' overrides '{2}'! "
|
||||
"Did you forget to indent the block inside the {0}?"
|
||||
"".format(name, key, key2))
|
||||
|
||||
if value[key] is None:
|
||||
value[key] = {}
|
||||
@ -1296,7 +1276,7 @@ def polling_component_schema(default_update_interval):
|
||||
return COMPONENT_SCHEMA.extend({
|
||||
Required(CONF_UPDATE_INTERVAL): default_update_interval,
|
||||
})
|
||||
assert isinstance(default_update_interval, string_types)
|
||||
assert isinstance(default_update_interval, str)
|
||||
return COMPONENT_SCHEMA.extend({
|
||||
Optional(CONF_UPDATE_INTERVAL, default=default_update_interval): update_interval,
|
||||
})
|
||||
|
@ -1,17 +1,16 @@
# coding=utf-8
"""Constants used by esphome."""

MAJOR_VERSION = 1
MINOR_VERSION = 15
PATCH_VERSION = '0-dev'
__short_version__ = '{}.{}'.format(MAJOR_VERSION, MINOR_VERSION)
__version__ = '{}.{}'.format(__short_version__, PATCH_VERSION)
__short_version__ = f'{MAJOR_VERSION}.{MINOR_VERSION}'
__version__ = f'{__short_version__}.{PATCH_VERSION}'

ESP_PLATFORM_ESP32 = 'ESP32'
ESP_PLATFORM_ESP8266 = 'ESP8266'
ESP_PLATFORMS = [ESP_PLATFORM_ESP32, ESP_PLATFORM_ESP8266]

ALLOWED_NAME_CHARS = u'abcdefghijklmnopqrstuvwxyz0123456789_'
ALLOWED_NAME_CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789_'
ARDUINO_VERSION_ESP32_DEV = 'https://github.com/platformio/platform-espressif32.git#feature/stage'
ARDUINO_VERSION_ESP32_1_0_0 = 'espressif32@1.5.0'
ARDUINO_VERSION_ESP32_1_0_1 = 'espressif32@1.6.0'
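The version string assembly is unchanged, just expressed with f-strings. With the constants above:

    MAJOR_VERSION = 1
    MINOR_VERSION = 15
    PATCH_VERSION = '0-dev'
    __short_version__ = f'{MAJOR_VERSION}.{MINOR_VERSION}'
    __version__ = f'{__short_version__}.{PATCH_VERSION}'
    print(__short_version__, __version__)  # 1.15 1.15.0-dev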
@ -544,12 +543,12 @@ ICON_WEATHER_WINDY = 'mdi:weather-windy'
|
||||
ICON_WIFI = 'mdi:wifi'
|
||||
|
||||
UNIT_AMPERE = 'A'
|
||||
UNIT_CELSIUS = u'°C'
|
||||
UNIT_COUNTS_PER_CUBIC_METER = u'#/m³'
|
||||
UNIT_CELSIUS = '°C'
|
||||
UNIT_COUNTS_PER_CUBIC_METER = '#/m³'
|
||||
UNIT_DECIBEL = 'dB'
|
||||
UNIT_DECIBEL_MILLIWATT = 'dBm'
|
||||
UNIT_DEGREE_PER_SECOND = u'°/s'
|
||||
UNIT_DEGREES = u'°'
|
||||
UNIT_DEGREE_PER_SECOND = '°/s'
|
||||
UNIT_DEGREES = '°'
|
||||
UNIT_EMPTY = ''
|
||||
UNIT_G = 'G'
|
||||
UNIT_HECTOPASCAL = 'hPa'
|
||||
@ -559,12 +558,12 @@ UNIT_KILOMETER = 'km'
|
||||
UNIT_KILOMETER_PER_HOUR = 'km/h'
|
||||
UNIT_LUX = 'lx'
|
||||
UNIT_METER = 'm'
|
||||
UNIT_METER_PER_SECOND_SQUARED = u'm/s²'
|
||||
UNIT_MICROGRAMS_PER_CUBIC_METER = u'µg/m³'
|
||||
UNIT_METER_PER_SECOND_SQUARED = 'm/s²'
|
||||
UNIT_MICROGRAMS_PER_CUBIC_METER = 'µg/m³'
|
||||
UNIT_MICROMETER = 'µm'
|
||||
UNIT_MICROSIEMENS_PER_CENTIMETER = u'µS/cm'
|
||||
UNIT_MICROTESLA = u'µT'
|
||||
UNIT_OHM = u'Ω'
|
||||
UNIT_MICROSIEMENS_PER_CENTIMETER = 'µS/cm'
|
||||
UNIT_MICROTESLA = 'µT'
|
||||
UNIT_OHM = 'Ω'
|
||||
UNIT_PARTS_PER_BILLION = 'ppb'
|
||||
UNIT_PARTS_PER_MILLION = 'ppm'
|
||||
UNIT_PERCENT = '%'
|
||||
|
esphome/core.py
@ -13,7 +13,6 @@ from typing import Any, Dict, List, Optional, Set  # noqa
from esphome.const import CONF_ARDUINO_VERSION, SOURCE_FILE_EXTENSIONS, \
    CONF_COMMENT, CONF_ESPHOME, CONF_USE_ADDRESS, CONF_WIFI
from esphome.helpers import ensure_unique_string, is_hassio
from esphome.py_compat import IS_PY2, integer_types, text_type, string_types
from esphome.util import OrderedDict

_LOGGER = logging.getLogger(__name__)
@ -23,53 +22,47 @@ class EsphomeError(Exception):
    """General ESPHome exception occurred."""


if IS_PY2:
    base_int = long
else:
    base_int = int


class HexInt(base_int):
class HexInt(int):
    def __str__(self):
        if 0 <= self <= 255:
            return "0x{:02X}".format(self)
        return "0x{:X}".format(self)
            return f"0x{self:02X}"
        return f"0x{self:X}"


class IPAddress(object):
class IPAddress:
    def __init__(self, *args):
        if len(args) != 4:
            raise ValueError(u"IPAddress must consist up 4 items")
            raise ValueError("IPAddress must consist up 4 items")
        self.args = args

    def __str__(self):
        return '.'.join(str(x) for x in self.args)

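Dropping the base_int = long shim is safe because Python 3 has a single arbitrary-precision int, which HexInt now subclasses directly. A quick sketch of the two classes above in use:

    from esphome.core import HexInt, IPAddress

    print(str(HexInt(0x0A)))               # 0x0A  (two hex digits for values up to 255)
    print(str(HexInt(0x1234)))             # 0x1234
    print(str(IPAddress(192, 168, 0, 1)))  # 192.168.0.1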
class MACAddress(object):
|
||||
class MACAddress:
|
||||
def __init__(self, *parts):
|
||||
if len(parts) != 6:
|
||||
raise ValueError(u"MAC Address must consist of 6 items")
|
||||
raise ValueError("MAC Address must consist of 6 items")
|
||||
self.parts = parts
|
||||
|
||||
def __str__(self):
|
||||
return ':'.join('{:02X}'.format(part) for part in self.parts)
|
||||
return ':'.join(f'{part:02X}' for part in self.parts)
|
||||
|
||||
@property
|
||||
def as_hex(self):
|
||||
from esphome.cpp_generator import RawExpression
|
||||
|
||||
num = ''.join('{:02X}'.format(part) for part in self.parts)
|
||||
return RawExpression('0x{}ULL'.format(num))
|
||||
num = ''.join(f'{part:02X}' for part in self.parts)
|
||||
return RawExpression(f'0x{num}ULL')
|
||||
|
||||
|
||||
def is_approximately_integer(value):
|
||||
if isinstance(value, integer_types):
|
||||
if isinstance(value, int):
|
||||
return True
|
||||
return abs(value - round(value)) < 0.001
|
||||
|
||||
|
||||
class TimePeriod(object):
|
||||
class TimePeriod:
|
||||
def __init__(self, microseconds=None, milliseconds=None, seconds=None,
|
||||
minutes=None, hours=None, days=None):
|
||||
if days is not None:
|
||||
@ -137,17 +130,17 @@ class TimePeriod(object):
|
||||
|
||||
def __str__(self):
|
||||
if self.microseconds is not None:
|
||||
return '{}us'.format(self.total_microseconds)
|
||||
return f'{self.total_microseconds}us'
|
||||
if self.milliseconds is not None:
|
||||
return '{}ms'.format(self.total_milliseconds)
|
||||
return f'{self.total_milliseconds}ms'
|
||||
if self.seconds is not None:
|
||||
return '{}s'.format(self.total_seconds)
|
||||
return f'{self.total_seconds}s'
|
||||
if self.minutes is not None:
|
||||
return '{}min'.format(self.total_minutes)
|
||||
return f'{self.total_minutes}min'
|
||||
if self.hours is not None:
|
||||
return '{}h'.format(self.total_hours)
|
||||
return f'{self.total_hours}h'
|
||||
if self.days is not None:
|
||||
return '{}d'.format(self.total_days)
|
||||
return f'{self.total_days}d'
|
||||
return '0s'
|
||||
|
||||
@property
|
||||
@ -224,7 +217,7 @@ class TimePeriodMinutes(TimePeriod):
|
||||
LAMBDA_PROG = re.compile(r'id\(\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\)(\.?)')
|
||||
|
||||
|
||||
class Lambda(object):
|
||||
class Lambda:
|
||||
def __init__(self, value):
|
||||
# pylint: disable=protected-access
|
||||
if isinstance(value, Lambda):
|
||||
@ -260,10 +253,10 @@ class Lambda(object):
|
||||
return self.value
|
||||
|
||||
def __repr__(self):
|
||||
return u'Lambda<{}>'.format(self.value)
|
||||
return f'Lambda<{self.value}>'
|
||||
|
||||
|
||||
class ID(object):
|
||||
class ID:
|
||||
def __init__(self, id, is_declaration=False, type=None, is_manual=None):
|
||||
self.id = id
|
||||
if is_manual is None:
|
||||
@ -289,7 +282,7 @@ class ID(object):
|
||||
return self.id
|
||||
|
||||
def __repr__(self):
|
||||
return u'ID<{} declaration={}, type={}, manual={}>'.format(
|
||||
return 'ID<{} declaration={}, type={}, manual={}>'.format(
|
||||
self.id, self.is_declaration, self.type, self.is_manual)
|
||||
|
||||
def __eq__(self, other):
|
||||
@ -305,10 +298,10 @@ class ID(object):
|
||||
is_manual=self.is_manual)
|
||||
|
||||
|
||||
class DocumentLocation(object):
|
||||
class DocumentLocation:
|
||||
def __init__(self, document, line, column):
|
||||
# type: (basestring, int, int) -> None
|
||||
self.document = document # type: basestring
|
||||
# type: (str, int, int) -> None
|
||||
self.document = document # type: str
|
||||
self.line = line # type: int
|
||||
self.column = column # type: int
|
||||
|
||||
@ -321,10 +314,10 @@ class DocumentLocation(object):
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return u'{} {}:{}'.format(self.document, self.line, self.column)
|
||||
return f'{self.document} {self.line}:{self.column}'
|
||||
|
||||
|
||||
class DocumentRange(object):
|
||||
class DocumentRange:
|
||||
def __init__(self, start_mark, end_mark):
|
||||
# type: (DocumentLocation, DocumentLocation) -> None
|
||||
self.start_mark = start_mark # type: DocumentLocation
|
||||
@ -338,10 +331,10 @@ class DocumentRange(object):
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
return u'[{} - {}]'.format(self.start_mark, self.end_mark)
|
||||
return f'[{self.start_mark} - {self.end_mark}]'
|
||||
|
||||
|
||||
class Define(object):
|
||||
class Define:
|
||||
def __init__(self, name, value=None):
|
||||
self.name = name
|
||||
self.value = value
|
||||
@ -349,14 +342,14 @@ class Define(object):
|
||||
@property
|
||||
def as_build_flag(self):
|
||||
if self.value is None:
|
||||
return u'-D{}'.format(self.name)
|
||||
return u'-D{}={}'.format(self.name, self.value)
|
||||
return f'-D{self.name}'
|
||||
return f'-D{self.name}={self.value}'
|
||||
|
||||
@property
|
||||
def as_macro(self):
|
||||
if self.value is None:
|
||||
return u'#define {}'.format(self.name)
|
||||
return u'#define {} {}'.format(self.name, self.value)
|
||||
return f'#define {self.name}'
|
||||
return f'#define {self.name} {self.value}'
|
||||
|
||||
@property
|
||||
def as_tuple(self):
|
||||
@ -369,7 +362,7 @@ class Define(object):
|
||||
return isinstance(self, type(other)) and self.as_tuple == other.as_tuple
|
||||
|
||||
|
||||
class Library(object):
|
||||
class Library:
|
||||
def __init__(self, name, version):
|
||||
self.name = name
|
||||
self.version = version
|
||||
@ -378,7 +371,7 @@ class Library(object):
|
||||
def as_lib_dep(self):
|
||||
if self.version is None:
|
||||
return self.name
|
||||
return u'{}@{}'.format(self.name, self.version)
|
||||
return f'{self.name}@{self.version}'
|
||||
|
||||
@property
|
||||
def as_tuple(self):
|
||||
@ -461,7 +454,7 @@ def find_source_files(file):
|
||||
|
||||
|
||||
# pylint: disable=too-many-instance-attributes,too-many-public-methods
|
||||
class EsphomeCore(object):
|
||||
class EsphomeCore:
|
||||
def __init__(self):
|
||||
# True if command is run from dashboard
|
||||
self.dashboard = False
|
||||
@ -499,7 +492,7 @@ class EsphomeCore(object):
|
||||
# A set of build flags to set in the platformio project
|
||||
self.build_flags = set() # type: Set[str]
|
||||
# A set of defines to set for the compile process in esphome/core/defines.h
|
||||
self.defines = set() # type: Set[Define]
|
||||
self.defines = set() # type: Set['Define']
|
||||
# A dictionary of started coroutines, used to warn when a coroutine was not
|
||||
# awaited.
|
||||
self.active_coroutines = {} # type: Dict[int, Any]
|
||||
@ -634,15 +627,15 @@ class EsphomeCore(object):
|
||||
|
||||
# Print not-awaited coroutines
|
||||
for obj in self.active_coroutines.values():
|
||||
_LOGGER.warning(u"Coroutine '%s' %s was never awaited with 'yield'.", obj.__name__, obj)
|
||||
_LOGGER.warning(u"Please file a bug report with your configuration.")
|
||||
_LOGGER.warning("Coroutine '%s' %s was never awaited with 'yield'.", obj.__name__, obj)
|
||||
_LOGGER.warning("Please file a bug report with your configuration.")
|
||||
if self.active_coroutines:
|
||||
raise EsphomeError()
|
||||
if self.component_ids:
|
||||
comps = u', '.join(u"'{}'".format(x) for x in self.component_ids)
|
||||
_LOGGER.warning(u"Components %s were never registered. Please create a bug report",
|
||||
comps = ', '.join(f"'{x}'" for x in self.component_ids)
|
||||
_LOGGER.warning("Components %s were never registered. Please create a bug report",
|
||||
comps)
|
||||
_LOGGER.warning(u"with your configuration.")
|
||||
_LOGGER.warning("with your configuration.")
|
||||
raise EsphomeError()
|
||||
self.active_coroutines.clear()
|
||||
|
||||
@ -652,8 +645,8 @@ class EsphomeCore(object):
|
||||
if isinstance(expression, Expression):
|
||||
expression = statement(expression)
|
||||
if not isinstance(expression, Statement):
|
||||
raise ValueError(u"Add '{}' must be expression or statement, not {}"
|
||||
u"".format(expression, type(expression)))
|
||||
raise ValueError("Add '{}' must be expression or statement, not {}"
|
||||
"".format(expression, type(expression)))
|
||||
|
||||
self.main_statements.append(expression)
|
||||
_LOGGER.debug("Adding: %s", expression)
|
||||
@ -665,16 +658,16 @@ class EsphomeCore(object):
|
||||
if isinstance(expression, Expression):
|
||||
expression = statement(expression)
|
||||
if not isinstance(expression, Statement):
|
||||
raise ValueError(u"Add '{}' must be expression or statement, not {}"
|
||||
u"".format(expression, type(expression)))
|
||||
raise ValueError("Add '{}' must be expression or statement, not {}"
|
||||
"".format(expression, type(expression)))
|
||||
self.global_statements.append(expression)
|
||||
_LOGGER.debug("Adding global: %s", expression)
|
||||
return expression
|
||||
|
||||
def add_library(self, library):
|
||||
if not isinstance(library, Library):
|
||||
raise ValueError(u"Library {} must be instance of Library, not {}"
|
||||
u"".format(library, type(library)))
|
||||
raise ValueError("Library {} must be instance of Library, not {}"
|
||||
"".format(library, type(library)))
|
||||
_LOGGER.debug("Adding library: %s", library)
|
||||
for other in self.libraries[:]:
|
||||
if other.name != library.name:
|
||||
@ -689,9 +682,9 @@ class EsphomeCore(object):
|
||||
if other.version == library.version:
|
||||
break
|
||||
|
||||
raise ValueError(u"Version pinning failed! Libraries {} and {} "
|
||||
u"requested with conflicting versions!"
|
||||
u"".format(library, other))
|
||||
raise ValueError("Version pinning failed! Libraries {} and {} "
|
||||
"requested with conflicting versions!"
|
||||
"".format(library, other))
|
||||
else:
|
||||
self.libraries.append(library)
|
||||
return library
|
||||
@ -702,20 +695,20 @@ class EsphomeCore(object):
|
||||
return build_flag
|
||||
|
||||
def add_define(self, define):
|
||||
if isinstance(define, string_types):
|
||||
if isinstance(define, str):
|
||||
define = Define(define)
|
||||
elif isinstance(define, Define):
|
||||
pass
|
||||
else:
|
||||
raise ValueError(u"Define {} must be string or Define, not {}"
|
||||
u"".format(define, type(define)))
|
||||
raise ValueError("Define {} must be string or Define, not {}"
|
||||
"".format(define, type(define)))
|
||||
self.defines.add(define)
|
||||
_LOGGER.debug("Adding define: %s", define)
|
||||
return define
|
||||
|
||||
def get_variable(self, id):
|
||||
if not isinstance(id, ID):
|
||||
raise ValueError("ID {!r} must be of type ID!".format(id))
|
||||
raise ValueError(f"ID {id!r} must be of type ID!")
|
||||
while True:
|
||||
if id in self.variables:
|
||||
yield self.variables[id]
|
||||
@ -735,7 +728,7 @@ class EsphomeCore(object):
|
||||
|
||||
def register_variable(self, id, obj):
|
||||
if id in self.variables:
|
||||
raise EsphomeError("ID {} is already registered".format(id))
|
||||
raise EsphomeError(f"ID {id} is already registered")
|
||||
_LOGGER.debug("Registered variable %s of type %s", id.id, id.type)
|
||||
self.variables[id] = obj
|
||||
|
||||
@ -748,10 +741,10 @@ class EsphomeCore(object):
|
||||
|
||||
main_code = []
|
||||
for exp in self.main_statements:
|
||||
text = text_type(statement(exp))
|
||||
text = str(statement(exp))
|
||||
text = text.rstrip()
|
||||
main_code.append(text)
|
||||
return u'\n'.join(main_code) + u'\n\n'
|
||||
return '\n'.join(main_code) + '\n\n'
|
||||
|
||||
@property
|
||||
def cpp_global_section(self):
|
||||
@ -759,17 +752,17 @@ class EsphomeCore(object):
|
||||
|
||||
global_code = []
|
||||
for exp in self.global_statements:
|
||||
text = text_type(statement(exp))
|
||||
text = str(statement(exp))
|
||||
text = text.rstrip()
|
||||
global_code.append(text)
|
||||
return u'\n'.join(global_code) + u'\n'
|
||||
return '\n'.join(global_code) + '\n'
|
||||
|
||||
|
||||
class AutoLoad(OrderedDict):
|
||||
pass
|
||||
|
||||
|
||||
class EnumValue(object):
|
||||
class EnumValue:
|
||||
"""Special type used by ESPHome to mark enum values for cv.enum."""
|
||||
@property
|
||||
def enum_value(self):
|
||||
|
@ -138,6 +138,9 @@ float Component::get_actual_setup_priority() const {
  return this->setup_priority_override_;
}
void Component::set_setup_priority(float priority) { this->setup_priority_override_ = priority; }

#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpmf-conversions"
bool Component::has_overridden_loop() const {
#ifdef CLANG_TIDY
  bool loop_overridden = true;
@ -148,6 +151,7 @@ bool Component::has_overridden_loop() const {
#endif
  return loop_overridden || call_loop_overridden;
}
#pragma GCC diagnostic pop

PollingComponent::PollingComponent(uint32_t update_interval) : Component(), update_interval_(update_interval) {}

@ -37,8 +37,8 @@ def validate_board(value):
|
||||
raise NotImplementedError
|
||||
|
||||
if value not in board_pins:
|
||||
raise cv.Invalid(u"Could not find board '{}'. Valid boards are {}".format(
|
||||
value, u', '.join(sorted(board_pins.keys()))))
|
||||
raise cv.Invalid("Could not find board '{}'. Valid boards are {}".format(
|
||||
value, ', '.join(sorted(board_pins.keys()))))
|
||||
return value
|
||||
|
||||
|
||||
@ -108,8 +108,8 @@ def valid_include(value):
|
||||
value = cv.file_(value)
|
||||
_, ext = os.path.splitext(value)
|
||||
if ext not in VALID_INCLUDE_EXTS:
|
||||
raise cv.Invalid(u"Include has invalid file extension {} - valid extensions are {}"
|
||||
u"".format(ext, ', '.join(VALID_INCLUDE_EXTS)))
|
||||
raise cv.Invalid("Include has invalid file extension {} - valid extensions are {}"
|
||||
"".format(ext, ', '.join(VALID_INCLUDE_EXTS)))
|
||||
return value
|
||||
|
||||
|
||||
@ -184,7 +184,7 @@ def include_file(path, basename):
|
||||
_, ext = os.path.splitext(path)
|
||||
if ext in ['.h', '.hpp', '.tcc']:
|
||||
# Header, add include statement
|
||||
cg.add_global(cg.RawStatement(u'#include "{}"'.format(basename)))
|
||||
cg.add_global(cg.RawStatement(f'#include "{basename}"'))
|
||||
|
||||
|
||||
@coroutine_with_priority(-1000.0)
|
||||
@ -238,7 +238,7 @@ def to_code(config):
|
||||
ld_script = ld_scripts[1]
|
||||
|
||||
if ld_script is not None:
|
||||
cg.add_build_flag('-Wl,-T{}'.format(ld_script))
|
||||
cg.add_build_flag(f'-Wl,-T{ld_script}')
|
||||
|
||||
cg.add_build_flag('-fno-exceptions')
|
||||
|
||||
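The changes above all follow the same mechanical pattern: the u prefix goes away (str is already Unicode on Python 3) and simple str.format() calls become f-strings. A minimal illustrative sketch, with made-up values rather than anything from this diff:

# Before (Python 2 compatible): explicit unicode literal plus .format()
name, code = "wifi", 3  # hypothetical values
old_message = u"Component {} failed with code {}".format(name, code)

# After (Python 3 only): an f-string produces the same text
new_message = f"Component {name} failed with code {code}"
assert old_message == new_message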
|
@ -10,22 +10,21 @@ from esphome.core import ( # noqa
|
||||
TimePeriodMilliseconds, TimePeriodMinutes, TimePeriodSeconds, coroutine, Library, Define,
|
||||
EnumValue)
|
||||
from esphome.helpers import cpp_string_escape, indent_all_but_first_and_last
|
||||
from esphome.py_compat import integer_types, string_types, text_type
|
||||
from esphome.util import OrderedDict
|
||||
|
||||
|
||||
class Expression(object):
|
||||
class Expression:
|
||||
def __str__(self):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
SafeExpType = Union[Expression, bool, str, text_type, int, float, TimePeriod,
|
||||
SafeExpType = Union[Expression, bool, str, str, int, float, TimePeriod,
|
||||
Type[bool], Type[int], Type[float], List[Any]]
|
||||
|
||||
|
||||
class RawExpression(Expression):
|
||||
def __init__(self, text): # type: (Union[str, unicode]) -> None
|
||||
super(RawExpression, self).__init__()
|
||||
def __init__(self, text): # type: (Union[str, str]) -> None
|
||||
super().__init__()
|
||||
self.text = text
|
||||
|
||||
def __str__(self):
|
||||
@ -35,7 +34,7 @@ class RawExpression(Expression):
|
||||
# pylint: disable=redefined-builtin
|
||||
class AssignmentExpression(Expression):
|
||||
def __init__(self, type, modifier, name, rhs, obj):
|
||||
super(AssignmentExpression, self).__init__()
|
||||
super().__init__()
|
||||
self.type = type
|
||||
self.modifier = modifier
|
||||
self.name = name
|
||||
@ -44,24 +43,24 @@ class AssignmentExpression(Expression):
|
||||
|
||||
def __str__(self):
|
||||
if self.type is None:
|
||||
return u"{} = {}".format(self.name, self.rhs)
|
||||
return u"{} {}{} = {}".format(self.type, self.modifier, self.name, self.rhs)
|
||||
return f"{self.name} = {self.rhs}"
|
||||
return f"{self.type} {self.modifier}{self.name} = {self.rhs}"
|
||||
|
||||
|
||||
class VariableDeclarationExpression(Expression):
|
||||
def __init__(self, type, modifier, name):
|
||||
super(VariableDeclarationExpression, self).__init__()
|
||||
super().__init__()
|
||||
self.type = type
|
||||
self.modifier = modifier
|
||||
self.name = name
|
||||
|
||||
def __str__(self):
|
||||
return u"{} {}{}".format(self.type, self.modifier, self.name)
|
||||
return f"{self.type} {self.modifier}{self.name}"
|
||||
|
||||
|
||||
class ExpressionList(Expression):
|
||||
def __init__(self, *args):
|
||||
super(ExpressionList, self).__init__()
|
||||
super().__init__()
|
||||
# Remove every None on end
|
||||
args = list(args)
|
||||
while args and args[-1] is None:
|
||||
@ -69,7 +68,7 @@ class ExpressionList(Expression):
|
||||
self.args = [safe_exp(arg) for arg in args]
|
||||
|
||||
def __str__(self):
|
||||
text = u", ".join(text_type(x) for x in self.args)
|
||||
text = ", ".join(str(x) for x in self.args)
|
||||
return indent_all_but_first_and_last(text)
|
||||
|
||||
def __iter__(self):
|
||||
@ -78,11 +77,11 @@ class ExpressionList(Expression):
|
||||
|
||||
class TemplateArguments(Expression):
|
||||
def __init__(self, *args): # type: (*SafeExpType) -> None
|
||||
super(TemplateArguments, self).__init__()
|
||||
super().__init__()
|
||||
self.args = ExpressionList(*args)
|
||||
|
||||
def __str__(self):
|
||||
return u'<{}>'.format(self.args)
|
||||
return f'<{self.args}>'
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.args)
|
||||
@ -90,7 +89,7 @@ class TemplateArguments(Expression):
|
||||
|
||||
class CallExpression(Expression):
|
||||
def __init__(self, base, *args): # type: (Expression, *SafeExpType) -> None
|
||||
super(CallExpression, self).__init__()
|
||||
super().__init__()
|
||||
self.base = base
|
||||
if args and isinstance(args[0], TemplateArguments):
|
||||
self.template_args = args[0]
|
||||
@ -101,13 +100,13 @@ class CallExpression(Expression):
|
||||
|
||||
def __str__(self):
|
||||
if self.template_args is not None:
|
||||
return u'{}{}({})'.format(self.base, self.template_args, self.args)
|
||||
return u'{}({})'.format(self.base, self.args)
|
||||
return f'{self.base}{self.template_args}({self.args})'
|
||||
return f'{self.base}({self.args})'
|
||||
|
||||
|
||||
class StructInitializer(Expression):
|
||||
def __init__(self, base, *args): # type: (Expression, *Tuple[str, SafeExpType]) -> None
|
||||
super(StructInitializer, self).__init__()
|
||||
super().__init__()
|
||||
self.base = base
|
||||
if not isinstance(args, OrderedDict):
|
||||
args = OrderedDict(args)
|
||||
@ -119,16 +118,16 @@ class StructInitializer(Expression):
|
||||
self.args[key] = exp
|
||||
|
||||
def __str__(self):
|
||||
cpp = u'{}{{\n'.format(self.base)
|
||||
cpp = f'{self.base}{{\n'
|
||||
for key, value in self.args.items():
|
||||
cpp += u' .{} = {},\n'.format(key, value)
|
||||
cpp += u'}'
|
||||
cpp += f' .{key} = {value},\n'
|
||||
cpp += '}'
|
||||
return cpp
|
||||
|
||||
|
||||
class ArrayInitializer(Expression):
|
||||
def __init__(self, *args, **kwargs): # type: (*Any, **Any) -> None
|
||||
super(ArrayInitializer, self).__init__()
|
||||
super().__init__()
|
||||
self.multiline = kwargs.get('multiline', False)
|
||||
self.args = []
|
||||
for arg in args:
|
||||
@ -139,30 +138,30 @@ class ArrayInitializer(Expression):
|
||||
|
||||
def __str__(self):
|
||||
if not self.args:
|
||||
return u'{}'
|
||||
return '{}'
|
||||
if self.multiline:
|
||||
cpp = u'{\n'
|
||||
cpp = '{\n'
|
||||
for arg in self.args:
|
||||
cpp += u' {},\n'.format(arg)
|
||||
cpp += u'}'
|
||||
cpp += f' {arg},\n'
|
||||
cpp += '}'
|
||||
else:
|
||||
cpp = u'{' + u', '.join(str(arg) for arg in self.args) + u'}'
|
||||
cpp = '{' + ', '.join(str(arg) for arg in self.args) + '}'
|
||||
return cpp
|
||||
|
||||
|
||||
class ParameterExpression(Expression):
|
||||
def __init__(self, type, id):
|
||||
super(ParameterExpression, self).__init__()
|
||||
super().__init__()
|
||||
self.type = safe_exp(type)
|
||||
self.id = id
|
||||
|
||||
def __str__(self):
|
||||
return u"{} {}".format(self.type, self.id)
|
||||
return f"{self.type} {self.id}"
|
||||
|
||||
|
||||
class ParameterListExpression(Expression):
|
||||
def __init__(self, *parameters):
|
||||
super(ParameterListExpression, self).__init__()
|
||||
super().__init__()
|
||||
self.parameters = []
|
||||
for parameter in parameters:
|
||||
if not isinstance(parameter, ParameterExpression):
|
||||
@ -170,12 +169,12 @@ class ParameterListExpression(Expression):
|
||||
self.parameters.append(parameter)
|
||||
|
||||
def __str__(self):
|
||||
return u", ".join(text_type(x) for x in self.parameters)
|
||||
return ", ".join(str(x) for x in self.parameters)
|
||||
|
||||
|
||||
class LambdaExpression(Expression):
|
||||
def __init__(self, parts, parameters, capture='=', return_type=None):
|
||||
super(LambdaExpression, self).__init__()
|
||||
super().__init__()
|
||||
self.parts = parts
|
||||
if not isinstance(parameters, ParameterListExpression):
|
||||
parameters = ParameterListExpression(*parameters)
|
||||
@ -184,15 +183,15 @@ class LambdaExpression(Expression):
|
||||
self.return_type = safe_exp(return_type) if return_type is not None else None
|
||||
|
||||
def __str__(self):
|
||||
cpp = u'[{}]({})'.format(self.capture, self.parameters)
|
||||
cpp = f'[{self.capture}]({self.parameters})'
|
||||
if self.return_type is not None:
|
||||
cpp += u' -> {}'.format(self.return_type)
|
||||
cpp += u' {{\n{}\n}}'.format(self.content)
|
||||
cpp += f' -> {self.return_type}'
|
||||
cpp += f' {{\n{self.content}\n}}'
|
||||
return indent_all_but_first_and_last(cpp)
|
||||
|
||||
@property
|
||||
def content(self):
|
||||
return u''.join(text_type(part) for part in self.parts)
|
||||
return ''.join(str(part) for part in self.parts)
|
||||
|
||||
|
||||
class Literal(Expression):
|
||||
@ -201,41 +200,41 @@ class Literal(Expression):
|
||||
|
||||
|
||||
class StringLiteral(Literal):
|
||||
def __init__(self, string): # type: (Union[str, unicode]) -> None
|
||||
super(StringLiteral, self).__init__()
|
||||
def __init__(self, string): # type: (Union[str, str]) -> None
|
||||
super().__init__()
|
||||
self.string = string
|
||||
|
||||
def __str__(self):
|
||||
return u'{}'.format(cpp_string_escape(self.string))
|
||||
return '{}'.format(cpp_string_escape(self.string))
|
||||
|
||||
|
||||
class IntLiteral(Literal):
|
||||
def __init__(self, i): # type: (Union[int, long]) -> None
|
||||
super(IntLiteral, self).__init__()
|
||||
def __init__(self, i): # type: (Union[int]) -> None
|
||||
super().__init__()
|
||||
self.i = i
|
||||
|
||||
def __str__(self):
|
||||
if self.i > 4294967295:
|
||||
return u'{}ULL'.format(self.i)
|
||||
return f'{self.i}ULL'
|
||||
if self.i > 2147483647:
|
||||
return u'{}UL'.format(self.i)
|
||||
return f'{self.i}UL'
|
||||
if self.i < -2147483648:
|
||||
return u'{}LL'.format(self.i)
|
||||
return text_type(self.i)
|
||||
return f'{self.i}LL'
|
||||
return str(self.i)
|
||||
|
||||
|
||||
class BoolLiteral(Literal):
|
||||
def __init__(self, binary): # type: (bool) -> None
|
||||
super(BoolLiteral, self).__init__()
|
||||
super().__init__()
|
||||
self.binary = binary
|
||||
|
||||
def __str__(self):
|
||||
return u"true" if self.binary else u"false"
|
||||
return "true" if self.binary else "false"
|
||||
|
||||
|
||||
class HexIntLiteral(Literal):
|
||||
def __init__(self, i): # type: (int) -> None
|
||||
super(HexIntLiteral, self).__init__()
|
||||
super().__init__()
|
||||
self.i = HexInt(i)
|
||||
|
||||
def __str__(self):
|
||||
@ -244,18 +243,18 @@ class HexIntLiteral(Literal):
|
||||
|
||||
class FloatLiteral(Literal):
|
||||
def __init__(self, value): # type: (float) -> None
|
||||
super(FloatLiteral, self).__init__()
|
||||
super().__init__()
|
||||
self.float_ = value
|
||||
|
||||
def __str__(self):
|
||||
if math.isnan(self.float_):
|
||||
return u"NAN"
|
||||
return u"{}f".format(self.float_)
|
||||
return "NAN"
|
||||
return f"{self.float_}f"
|
||||
|
||||
|
||||
# pylint: disable=bad-continuation
|
||||
def safe_exp(
|
||||
obj # type: Union[Expression, bool, str, unicode, int, long, float, TimePeriod, list]
|
||||
obj # type: Union[Expression, bool, str, int, float, TimePeriod, list]
|
||||
):
|
||||
# type: (...) -> Expression
|
||||
"""Try to convert obj to an expression by automatically converting native python types to
|
||||
@ -269,11 +268,11 @@ def safe_exp(
|
||||
return safe_exp(obj.enum_value)
|
||||
if isinstance(obj, bool):
|
||||
return BoolLiteral(obj)
|
||||
if isinstance(obj, string_types):
|
||||
if isinstance(obj, str):
|
||||
return StringLiteral(obj)
|
||||
if isinstance(obj, HexInt):
|
||||
return HexIntLiteral(obj)
|
||||
if isinstance(obj, integer_types):
|
||||
if isinstance(obj, int):
|
||||
return IntLiteral(obj)
|
||||
if isinstance(obj, float):
|
||||
return FloatLiteral(obj)
|
||||
@ -294,15 +293,15 @@ def safe_exp(
|
||||
if obj is float:
|
||||
return float_
|
||||
if isinstance(obj, ID):
|
||||
raise ValueError(u"Object {} is an ID. Did you forget to register the variable?"
|
||||
u"".format(obj))
|
||||
raise ValueError("Object {} is an ID. Did you forget to register the variable?"
|
||||
"".format(obj))
|
||||
if inspect.isgenerator(obj):
|
||||
raise ValueError(u"Object {} is a coroutine. Did you forget to await the expression with "
|
||||
u"'yield'?".format(obj))
|
||||
raise ValueError(u"Object is not an expression", obj)
|
||||
raise ValueError("Object {} is a coroutine. Did you forget to await the expression with "
|
||||
"'yield'?".format(obj))
|
||||
raise ValueError("Object is not an expression", obj)
|
||||
|
||||
|
||||
class Statement(object):
|
||||
class Statement:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
@ -312,7 +311,7 @@ class Statement(object):
|
||||
|
||||
class RawStatement(Statement):
|
||||
def __init__(self, text):
|
||||
super(RawStatement, self).__init__()
|
||||
super().__init__()
|
||||
self.text = text
|
||||
|
||||
def __str__(self):
|
||||
@ -321,38 +320,38 @@ class RawStatement(Statement):
|
||||
|
||||
class ExpressionStatement(Statement):
|
||||
def __init__(self, expression):
|
||||
super(ExpressionStatement, self).__init__()
|
||||
super().__init__()
|
||||
self.expression = safe_exp(expression)
|
||||
|
||||
def __str__(self):
|
||||
return u"{};".format(self.expression)
|
||||
return f"{self.expression};"
|
||||
|
||||
|
||||
class LineComment(Statement):
|
||||
def __init__(self, value): # type: (unicode) -> None
|
||||
super(LineComment, self).__init__()
|
||||
def __init__(self, value): # type: (str) -> None
|
||||
super().__init__()
|
||||
self._value = value
|
||||
|
||||
def __str__(self):
|
||||
parts = self._value.split(u'\n')
|
||||
parts = [u'// {}'.format(x) for x in parts]
|
||||
return u'\n'.join(parts)
|
||||
parts = self._value.split('\n')
|
||||
parts = [f'// {x}' for x in parts]
|
||||
return '\n'.join(parts)
|
||||
|
||||
|
||||
class ProgmemAssignmentExpression(AssignmentExpression):
|
||||
def __init__(self, type, name, rhs, obj):
|
||||
super(ProgmemAssignmentExpression, self).__init__(
|
||||
super().__init__(
|
||||
type, '', name, rhs, obj
|
||||
)
|
||||
|
||||
def __str__(self):
|
||||
type_ = self.type
|
||||
return u"static const {} {}[] PROGMEM = {}".format(type_, self.name, self.rhs)
|
||||
return f"static const {type_} {self.name}[] PROGMEM = {self.rhs}"
|
||||
|
||||
|
||||
def progmem_array(id, rhs):
|
||||
rhs = safe_exp(rhs)
|
||||
obj = MockObj(id, u'.')
|
||||
obj = MockObj(id, '.')
|
||||
assignment = ProgmemAssignmentExpression(id.type, id, rhs, obj)
|
||||
CORE.add(assignment)
|
||||
CORE.register_variable(id, obj)
|
||||
@ -381,7 +380,7 @@ def variable(id, # type: ID
|
||||
"""
|
||||
assert isinstance(id, ID)
|
||||
rhs = safe_exp(rhs)
|
||||
obj = MockObj(id, u'.')
|
||||
obj = MockObj(id, '.')
|
||||
if type is not None:
|
||||
id.type = type
|
||||
assignment = AssignmentExpression(id.type, '', id, rhs, obj)
|
||||
@ -405,7 +404,7 @@ def Pvariable(id, # type: ID
|
||||
:returns The new variable as a MockObj.
|
||||
"""
|
||||
rhs = safe_exp(rhs)
|
||||
obj = MockObj(id, u'->')
|
||||
obj = MockObj(id, '->')
|
||||
if type is not None:
|
||||
id.type = type
|
||||
decl = VariableDeclarationExpression(id.type, '*', id)
|
||||
@ -594,51 +593,51 @@ class MockObj(Expression):
|
||||
|
||||
Mostly consists of magic methods that allow ESPHome's codegen syntax.
|
||||
"""
|
||||
def __init__(self, base, op=u'.'):
|
||||
def __init__(self, base, op='.'):
|
||||
self.base = base
|
||||
self.op = op
|
||||
super(MockObj, self).__init__()
|
||||
super().__init__()
|
||||
|
||||
def __getattr__(self, attr): # type: (str) -> MockObj
|
||||
next_op = u'.'
|
||||
if attr.startswith(u'P') and self.op not in ['::', '']:
|
||||
next_op = '.'
|
||||
if attr.startswith('P') and self.op not in ['::', '']:
|
||||
attr = attr[1:]
|
||||
next_op = u'->'
|
||||
if attr.startswith(u'_'):
|
||||
next_op = '->'
|
||||
if attr.startswith('_'):
|
||||
attr = attr[1:]
|
||||
return MockObj(u'{}{}{}'.format(self.base, self.op, attr), next_op)
|
||||
return MockObj(f'{self.base}{self.op}{attr}', next_op)
|
||||
|
||||
def __call__(self, *args): # type: (SafeExpType) -> MockObj
|
||||
call = CallExpression(self.base, *args)
|
||||
return MockObj(call, self.op)
|
||||
|
||||
def __str__(self): # type: () -> unicode
|
||||
return text_type(self.base)
|
||||
def __str__(self): # type: () -> str
|
||||
return str(self.base)
|
||||
|
||||
def __repr__(self):
|
||||
return u'MockObj<{}>'.format(text_type(self.base))
|
||||
return 'MockObj<{}>'.format(str(self.base))
|
||||
|
||||
@property
|
||||
def _(self): # type: () -> MockObj
|
||||
return MockObj(u'{}{}'.format(self.base, self.op))
|
||||
return MockObj(f'{self.base}{self.op}')
|
||||
|
||||
@property
|
||||
def new(self): # type: () -> MockObj
|
||||
return MockObj(u'new {}'.format(self.base), u'->')
|
||||
return MockObj(f'new {self.base}', '->')
|
||||
|
||||
def template(self, *args): # type: (*SafeExpType) -> MockObj
|
||||
if len(args) != 1 or not isinstance(args[0], TemplateArguments):
|
||||
args = TemplateArguments(*args)
|
||||
else:
|
||||
args = args[0]
|
||||
return MockObj(u'{}{}'.format(self.base, args))
|
||||
return MockObj(f'{self.base}{args}')
|
||||
|
||||
def namespace(self, name): # type: (str) -> MockObj
|
||||
return MockObj(u'{}{}'.format(self._, name), u'::')
|
||||
return MockObj(f'{self._}{name}', '::')
|
||||
|
||||
def class_(self, name, *parents): # type: (str, *MockObjClass) -> MockObjClass
|
||||
op = '' if self.op == '' else '::'
|
||||
return MockObjClass(u'{}{}{}'.format(self.base, op, name), u'.', parents=parents)
|
||||
return MockObjClass(f'{self.base}{op}{name}', '.', parents=parents)
|
||||
|
||||
def struct(self, name): # type: (str) -> MockObjClass
|
||||
return self.class_(name)
|
||||
@ -648,24 +647,24 @@ class MockObj(Expression):
|
||||
|
||||
def operator(self, name): # type: (str) -> MockObj
|
||||
if name == 'ref':
|
||||
return MockObj(u'{} &'.format(self.base), u'')
|
||||
return MockObj(f'{self.base} &', '')
|
||||
if name == 'ptr':
|
||||
return MockObj(u'{} *'.format(self.base), u'')
|
||||
return MockObj(f'{self.base} *', '')
|
||||
if name == "const":
|
||||
return MockObj(u'const {}'.format(self.base), u'')
|
||||
return MockObj(f'const {self.base}', '')
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
def using(self): # type: () -> MockObj
|
||||
assert self.op == '::'
|
||||
return MockObj(u'using namespace {}'.format(self.base))
|
||||
return MockObj(f'using namespace {self.base}')
|
||||
|
||||
def __getitem__(self, item): # type: (Union[str, Expression]) -> MockObj
|
||||
next_op = u'.'
|
||||
if isinstance(item, str) and item.startswith(u'P'):
|
||||
next_op = '.'
|
||||
if isinstance(item, str) and item.startswith('P'):
|
||||
item = item[1:]
|
||||
next_op = u'->'
|
||||
return MockObj(u'{}[{}]'.format(self.base, item), next_op)
|
||||
next_op = '->'
|
||||
return MockObj(f'{self.base}[{item}]', next_op)
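For readers unfamiliar with the codegen layer, the methods above only build C++ source text: attribute access appends .name, a leading P switches the next operator to ->, and indexing appends [item]. A much simplified stand-in (not the real esphome.cpp_generator.MockObj) shows the idea:

# Conceptual illustration of the MockObj text-building shown above
# (simplified stand-in, not the real class).
class TinyMockObj:
    def __init__(self, base, op='.'):
        self.base, self.op = base, op

    def __getattr__(self, attr):
        next_op = '.'
        if attr.startswith('P') and self.op not in ['::', '']:
            attr, next_op = attr[1:], '->'
        return TinyMockObj(f'{self.base}{self.op}{attr}', next_op)

    def __str__(self):
        return str(self.base)

app = TinyMockObj('App', '.')
print(app.Pset_name)  # prints: App.set_name  (later accesses on the result use '->')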
|
||||
|
||||
|
||||
class MockObjEnum(MockObj):
|
||||
@ -679,13 +678,13 @@ class MockObjEnum(MockObj):
|
||||
kwargs['base'] = base
|
||||
MockObj.__init__(self, *args, **kwargs)
|
||||
|
||||
def __str__(self): # type: () -> unicode
|
||||
def __str__(self): # type: () -> str
|
||||
if self._is_class:
|
||||
return super(MockObjEnum, self).__str__()
|
||||
return u'{}{}{}'.format(self.base, self.op, self._enum)
|
||||
return super().__str__()
|
||||
return f'{self.base}{self.op}{self._enum}'
|
||||
|
||||
def __repr__(self):
|
||||
return u'MockObj<{}>'.format(text_type(self.base))
|
||||
return 'MockObj<{}>'.format(str(self.base))
|
||||
|
||||
|
||||
class MockObjClass(MockObj):
|
||||
@ -716,7 +715,7 @@ class MockObjClass(MockObj):
|
||||
args = args[0]
|
||||
new_parents = self._parents[:]
|
||||
new_parents.append(self)
|
||||
return MockObjClass(u'{}{}'.format(self.base, args), parents=new_parents)
|
||||
return MockObjClass(f'{self.base}{args}', parents=new_parents)
|
||||
|
||||
def __repr__(self):
|
||||
return u'MockObjClass<{}, parents={}>'.format(text_type(self.base), self._parents)
|
||||
return 'MockObjClass<{}, parents={}>'.format(str(self.base), self._parents)
|
||||
@ -1,9 +1,10 @@
from esphome.const import CONF_INVERTED, CONF_MODE, CONF_NUMBER, CONF_SETUP_PRIORITY, \
CONF_UPDATE_INTERVAL, CONF_TYPE_ID
from esphome.core import coroutine, ID, CORE
# pylint: disable=unused-import
from esphome.core import coroutine, ID, CORE, ConfigType
from esphome.cpp_generator import RawExpression, add, get_variable
from esphome.cpp_types import App, GPIOPin
from esphome.py_compat import text_type
from esphome.util import Registry, RegistryEntry


@coroutine
@ -35,11 +36,11 @@ def register_component(var, config):
:param var: The variable representing the component.
:param config: The configuration for the component.
"""
id_ = text_type(var.base)
id_ = str(var.base)
if id_ not in CORE.component_ids:
raise ValueError(u"Component ID {} was not declared to inherit from Component, "
u"or was registered twice. Please create a bug report with your "
u"configuration.".format(id_))
raise ValueError("Component ID {} was not declared to inherit from Component, "
"or was registered twice. Please create a bug report with your "
"configuration.".format(id_))
CORE.component_ids.remove(id_)
if CONF_SETUP_PRIORITY in config:
add(var.set_setup_priority(config[CONF_SETUP_PRIORITY]))
@ -59,7 +60,7 @@ def register_parented(var, value):


def extract_registry_entry_config(registry, full_config):
# type: ('Registry', 'ConfigType') -> 'RegistryEntry'
# type: (Registry, ConfigType) -> RegistryEntry
key, config = next((k, v) for k, v in full_config.items() if k in registry)
return registry[key], config
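For context, extract_registry_entry_config() above simply returns the first registry entry whose key appears in the configuration block. A rough standalone sketch with invented data (a plain dict stands in for the real Registry class):

# Hypothetical stand-in for esphome.util.Registry: a plain dict of handlers.
registry = {"gpio": "handle_gpio", "ledc": "handle_ledc"}

full_config = {"id": "out1", "ledc": {"pin": 4}}  # invented config block

# Same lookup as extract_registry_entry_config(): first key present in the registry.
key, config = next((k, v) for k, v in full_config.items() if k in registry)
assert (key, config) == ("ledc", {"pin": 4})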
||||
|
@ -1,5 +1,4 @@
|
||||
# pylint: disable=wrong-import-position
|
||||
from __future__ import print_function
|
||||
|
||||
import codecs
|
||||
import collections
|
||||
@ -29,7 +28,6 @@ import tornado.websocket
|
||||
from esphome import const, util
|
||||
from esphome.__main__ import get_serial_ports
|
||||
from esphome.helpers import mkdir_p, get_bool_env, run_system_command
|
||||
from esphome.py_compat import IS_PY2, decode_text, encode_text
|
||||
from esphome.storage_json import EsphomeStorageJSON, StorageJSON, \
|
||||
esphome_storage_path, ext_storage_path, trash_storage_path
|
||||
from esphome.util import shlex_quote
|
||||
@ -42,7 +40,7 @@ from esphome.zeroconf import DashboardStatus, Zeroconf
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DashboardSettings(object):
|
||||
class DashboardSettings:
|
||||
def __init__(self):
|
||||
self.config_dir = ''
|
||||
self.password_digest = ''
|
||||
@ -58,9 +56,6 @@ class DashboardSettings(object):
|
||||
self.username = args.username or os.getenv('USERNAME', '')
|
||||
self.using_password = bool(password)
|
||||
if self.using_password:
|
||||
if IS_PY2:
|
||||
self.password_digest = hmac.new(password).digest()
|
||||
else:
|
||||
self.password_digest = hmac.new(password.encode()).digest()
|
||||
self.config_dir = args.configuration[0]
|
||||
|
||||
@ -88,8 +83,8 @@ class DashboardSettings(object):
|
||||
if username != self.username:
|
||||
return False
|
||||
|
||||
password_digest = hmac.new(encode_text(password)).digest()
|
||||
return hmac.compare_digest(self.password_digest, password_digest)
|
||||
password = hmac.new(password.encode()).digest()
|
||||
return username == self.username and hmac.compare_digest(self.password_digest, password)
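With the Python 2 branch removed, the dashboard always encodes the password before hashing and compares digests in constant time. A hedged sketch of the same check in isolation (values invented; note that newer Python versions require an explicit digestmod, which the dashboard code above leaves at the old MD5 default):

import hashlib
import hmac

stored_digest = hmac.new("hunter2".encode(), digestmod=hashlib.md5).digest()

def check_password(candidate):
    candidate_digest = hmac.new(candidate.encode(), digestmod=hashlib.md5).digest()
    # compare_digest avoids leaking information through timing differences
    return hmac.compare_digest(stored_digest, candidate_digest)

assert check_password("hunter2") and not check_password("wrong")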
|
||||
|
||||
def rel_path(self, *args):
|
||||
return os.path.join(self.config_dir, *args)
|
||||
@ -100,9 +95,6 @@ class DashboardSettings(object):
|
||||
|
||||
settings = DashboardSettings()
|
||||
|
||||
if IS_PY2:
|
||||
cookie_authenticated_yes = 'yes'
|
||||
else:
|
||||
cookie_authenticated_yes = b'yes'
|
||||
|
||||
|
||||
@ -181,7 +173,7 @@ def websocket_method(name):
|
||||
@websocket_class
|
||||
class EsphomeCommandWebSocket(tornado.websocket.WebSocketHandler):
|
||||
def __init__(self, application, request, **kwargs):
|
||||
super(EsphomeCommandWebSocket, self).__init__(application, request, **kwargs)
|
||||
super().__init__(application, request, **kwargs)
|
||||
self._proc = None
|
||||
self._is_closed = False
|
||||
|
||||
@ -204,7 +196,7 @@ class EsphomeCommandWebSocket(tornado.websocket.WebSocketHandler):
|
||||
# spawn can only be called once
|
||||
return
|
||||
command = self.build_command(json_message)
|
||||
_LOGGER.info(u"Running command '%s'", ' '.join(shlex_quote(x) for x in command))
|
||||
_LOGGER.info("Running command '%s'", ' '.join(shlex_quote(x) for x in command))
|
||||
self._proc = tornado.process.Subprocess(command,
|
||||
stdout=tornado.process.Subprocess.STREAM,
|
||||
stderr=subprocess.STDOUT,
|
||||
@ -227,9 +219,6 @@ class EsphomeCommandWebSocket(tornado.websocket.WebSocketHandler):
|
||||
|
||||
@tornado.gen.coroutine
|
||||
def _redirect_stdout(self):
|
||||
if IS_PY2:
|
||||
reg = '[\n\r]'
|
||||
else:
|
||||
reg = b'[\n\r]'
|
||||
|
||||
while True:
|
||||
@ -336,8 +325,8 @@ class WizardRequestHandler(BaseHandler):
|
||||
def post(self):
|
||||
from esphome import wizard
|
||||
|
||||
kwargs = {k: u''.join(decode_text(x) for x in v) for k, v in self.request.arguments.items()}
|
||||
destination = settings.rel_path(kwargs['name'] + u'.yaml')
|
||||
kwargs = {k: ''.join(str(x) for x in v) for k, v in self.request.arguments.items()}
|
||||
destination = settings.rel_path(kwargs['name'] + '.yaml')
|
||||
wizard.wizard_write(path=destination, **kwargs)
|
||||
self.redirect('./?begin=True')
|
||||
|
||||
@ -355,8 +344,8 @@ class DownloadBinaryRequestHandler(BaseHandler):
|
||||
|
||||
path = storage_json.firmware_bin_path
|
||||
self.set_header('Content-Type', 'application/octet-stream')
|
||||
filename = '{}.bin'.format(storage_json.name)
|
||||
self.set_header("Content-Disposition", 'attachment; filename="{}"'.format(filename))
|
||||
filename = f'{storage_json.name}.bin'
|
||||
self.set_header("Content-Disposition", f'attachment; filename="{filename}"')
|
||||
with open(path, 'rb') as f:
|
||||
while True:
|
||||
data = f.read(16384)
|
||||
@ -371,7 +360,7 @@ def _list_dashboard_entries():
|
||||
return [DashboardEntry(file) for file in files]
|
||||
|
||||
|
||||
class DashboardEntry(object):
|
||||
class DashboardEntry:
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
self._storage = None
|
||||
@ -609,8 +598,8 @@ class LoginHandler(BaseHandler):
|
||||
'X-HASSIO-KEY': os.getenv('HASSIO_TOKEN'),
|
||||
}
|
||||
data = {
|
||||
'username': decode_text(self.get_argument('username', '')),
|
||||
'password': decode_text(self.get_argument('password', ''))
|
||||
'username': self.get_argument('username', ''),
|
||||
'password': self.get_argument('password', '')
|
||||
}
|
||||
try:
|
||||
req = requests.post('http://hassio/auth', headers=headers, data=data)
|
||||
@ -627,8 +616,8 @@ class LoginHandler(BaseHandler):
|
||||
self.render_login_page(error="Invalid username or password")
|
||||
|
||||
def post_native_login(self):
|
||||
username = decode_text(self.get_argument("username", ''))
|
||||
password = decode_text(self.get_argument("password", ''))
|
||||
username = self.get_argument("username", '')
|
||||
password = self.get_argument("password", '')
|
||||
if settings.check_password(username, password):
|
||||
self.set_secure_cookie("authenticated", cookie_authenticated_yes)
|
||||
self.redirect("/")
|
||||
@ -663,7 +652,7 @@ def get_static_file_url(name):
|
||||
with open(path, 'rb') as f_handle:
|
||||
hash_ = hashlib.md5(f_handle.read()).hexdigest()[:8]
|
||||
_STATIC_FILE_HASHES[name] = hash_
|
||||
return u'./static/{}?hash={}'.format(name, hash_)
|
||||
return f'./static/{name}?hash={hash_}'
|
||||
|
||||
|
||||
def make_app(debug=False):
|
||||
@ -754,7 +743,7 @@ def start_web_server(args):
|
||||
if args.open_ui:
|
||||
import webbrowser
|
||||
|
||||
webbrowser.open('localhost:{}'.format(args.port))
|
||||
webbrowser.open(f'localhost:{args.port}')
|
||||
|
||||
if settings.status_use_ping:
|
||||
status_thread = PingStatusThread()
|
||||
|
@ -7,7 +7,6 @@ import time
|
||||
|
||||
from esphome.core import EsphomeError
|
||||
from esphome.helpers import is_ip_address, resolve_ip_address
|
||||
from esphome.py_compat import IS_PY2, char_to_byte
|
||||
|
||||
RESPONSE_OK = 0
|
||||
RESPONSE_REQUEST_AUTH = 1
|
||||
@ -38,7 +37,7 @@ MAGIC_BYTES = [0x6C, 0x26, 0xF7, 0x5C, 0x45]
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProgressBar(object):
|
||||
class ProgressBar:
|
||||
def __init__(self):
|
||||
self.last_progress = None
|
||||
|
||||
@ -72,33 +71,31 @@ def recv_decode(sock, amount, decode=True):
|
||||
data = sock.recv(amount)
|
||||
if not decode:
|
||||
return data
|
||||
return [char_to_byte(x) for x in data]
|
||||
return list(data)
|
||||
|
||||
|
||||
def receive_exactly(sock, amount, msg, expect, decode=True):
|
||||
if decode:
|
||||
data = []
|
||||
elif IS_PY2:
|
||||
data = ''
|
||||
else:
|
||||
data = b''
|
||||
|
||||
try:
|
||||
data += recv_decode(sock, 1, decode=decode)
|
||||
except socket.error as err:
|
||||
raise OTAError("Error receiving acknowledge {}: {}".format(msg, err))
|
||||
except OSError as err:
|
||||
raise OTAError(f"Error receiving acknowledge {msg}: {err}")
|
||||
|
||||
try:
|
||||
check_error(data, expect)
|
||||
except OTAError as err:
|
||||
sock.close()
|
||||
raise OTAError("Error {}: {}".format(msg, err))
|
||||
raise OTAError(f"Error {msg}: {err}")
|
||||
|
||||
while len(data) < amount:
|
||||
try:
|
||||
data += recv_decode(sock, amount - len(data), decode=decode)
|
||||
except socket.error as err:
|
||||
raise OTAError("Error receiving {}: {}".format(msg, err))
|
||||
except OSError as err:
|
||||
raise OTAError(f"Error receiving {msg}: {err}")
|
||||
return data
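The switch from socket.error to OSError in this function is purely cosmetic on Python 3, where socket.error is an alias of OSError, so exactly the same failures are caught. For example:

import socket

# On Python 3, socket.error is just an alias of OSError,
# so "except OSError" catches everything "except socket.error" did.
assert socket.error is OSError
assert issubclass(socket.timeout, OSError)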
|
||||
|
||||
|
||||
@ -145,12 +142,6 @@ def check_error(data, expect):
|
||||
|
||||
def send_check(sock, data, msg):
|
||||
try:
|
||||
if IS_PY2:
|
||||
if isinstance(data, (list, tuple)):
|
||||
data = ''.join([chr(x) for x in data])
|
||||
elif isinstance(data, int):
|
||||
data = chr(data)
|
||||
else:
|
||||
if isinstance(data, (list, tuple)):
|
||||
data = bytes(data)
|
||||
elif isinstance(data, int):
|
||||
@ -159,8 +150,8 @@ def send_check(sock, data, msg):
|
||||
data = data.encode('utf8')
|
||||
|
||||
sock.sendall(data)
|
||||
except socket.error as err:
|
||||
raise OTAError("Error sending {}: {}".format(msg, err))
|
||||
except OSError as err:
|
||||
raise OTAError(f"Error sending {msg}: {err}")
|
||||
|
||||
|
||||
def perform_ota(sock, password, file_handle, filename):
|
||||
@ -176,7 +167,7 @@ def perform_ota(sock, password, file_handle, filename):
|
||||
|
||||
_, version = receive_exactly(sock, 2, 'version', RESPONSE_OK)
|
||||
if version != OTA_VERSION_1_0:
|
||||
raise OTAError("Unsupported OTA version {}".format(version))
|
||||
raise OTAError(f"Unsupported OTA version {version}")
|
||||
|
||||
# Features
|
||||
send_check(sock, 0x00, 'features')
|
||||
@ -186,9 +177,7 @@ def perform_ota(sock, password, file_handle, filename):
|
||||
if auth == RESPONSE_REQUEST_AUTH:
|
||||
if not password:
|
||||
raise OTAError("ESP requests password, but no password given!")
|
||||
nonce = receive_exactly(sock, 32, 'authentication nonce', [], decode=False)
|
||||
if not IS_PY2:
|
||||
nonce = nonce.decode()
|
||||
nonce = receive_exactly(sock, 32, 'authentication nonce', [], decode=False).decode()
|
||||
_LOGGER.debug("Auth: Nonce is %s", nonce)
|
||||
cnonce = hashlib.md5(str(random.random()).encode()).hexdigest()
|
||||
_LOGGER.debug("Auth: CNonce is %s", cnonce)
|
||||
@ -235,9 +224,9 @@ def perform_ota(sock, password, file_handle, filename):
|
||||
|
||||
try:
|
||||
sock.sendall(chunk)
|
||||
except socket.error as err:
|
||||
except OSError as err:
|
||||
sys.stderr.write('\n')
|
||||
raise OTAError("Error sending data: {}".format(err))
|
||||
raise OTAError(f"Error sending data: {err}")
|
||||
|
||||
progress.update(offset / float(file_size))
|
||||
progress.done()
|
||||
@ -277,7 +266,7 @@ def run_ota_impl_(remote_host, remote_port, password, filename):
|
||||
sock.settimeout(10.0)
|
||||
try:
|
||||
sock.connect((ip, remote_port))
|
||||
except socket.error as err:
|
||||
except OSError as err:
|
||||
sock.close()
|
||||
_LOGGER.error("Connecting to %s:%s failed: %s", remote_host, remote_port, err)
|
||||
return 1
|
||||
|
@ -1,12 +1,8 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import codecs
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from esphome.py_compat import char_to_byte, text_type, IS_PY2, encode_text
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -18,24 +14,24 @@ def ensure_unique_string(preferred_string, current_strings):
|
||||
|
||||
while test_string in current_strings_set:
|
||||
tries += 1
|
||||
test_string = u"{}_{}".format(preferred_string, tries)
|
||||
test_string = f"{preferred_string}_{tries}"
|
||||
|
||||
return test_string
|
||||
|
||||
|
||||
def indent_all_but_first_and_last(text, padding=u' '):
|
||||
def indent_all_but_first_and_last(text, padding=' '):
|
||||
lines = text.splitlines(True)
|
||||
if len(lines) <= 2:
|
||||
return text
|
||||
return lines[0] + u''.join(padding + line for line in lines[1:-1]) + lines[-1]
|
||||
return lines[0] + ''.join(padding + line for line in lines[1:-1]) + lines[-1]
|
||||
|
||||
|
||||
def indent_list(text, padding=u' '):
|
||||
def indent_list(text, padding=' '):
|
||||
return [padding + line for line in text.splitlines()]
|
||||
|
||||
|
||||
def indent(text, padding=u' '):
|
||||
return u'\n'.join(indent_list(text, padding))
|
||||
def indent(text, padding=' '):
|
||||
return '\n'.join(indent_list(text, padding))
|
||||
|
||||
|
||||
# From https://stackoverflow.com/a/14945195/8924614
|
||||
@ -43,17 +39,16 @@ def cpp_string_escape(string, encoding='utf-8'):
|
||||
def _should_escape(byte): # type: (int) -> bool
|
||||
if not 32 <= byte < 127:
|
||||
return True
|
||||
if byte in (char_to_byte('\\'), char_to_byte('"')):
|
||||
if byte in (ord('\\'), ord('"')):
|
||||
return True
|
||||
return False
|
||||
|
||||
if isinstance(string, text_type):
|
||||
if isinstance(string, str):
|
||||
string = string.encode(encoding)
|
||||
result = ''
|
||||
for character in string:
|
||||
character = char_to_byte(character)
|
||||
if _should_escape(character):
|
||||
result += '\\%03o' % character
|
||||
result += f'\\{character:03o}'
|
||||
else:
|
||||
result += chr(character)
|
||||
return '"' + result + '"'
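To make the escaping rule above concrete: printable ASCII passes through, while quotes, backslashes and everything else become three-digit octal escapes. A simplified sketch of the same idea (illustrative only, not the real esphome.helpers implementation):

def cpp_escape(string, encoding='utf-8'):
    # Printable ASCII passes through; quotes, backslashes and
    # non-printable bytes become octal escapes.
    data = string.encode(encoding) if isinstance(string, str) else string
    out = ''
    for byte in data:
        if 32 <= byte < 127 and byte not in (ord('\\'), ord('"')):
            out += chr(byte)
        else:
            out += f'\\{byte:03o}'
    return '"' + out + '"'

print(cpp_escape('say "hi"\n'))  # "say \042hi\042\012"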
|
||||
@ -91,7 +86,7 @@ def mkdir_p(path):
|
||||
pass
|
||||
else:
|
||||
from esphome.core import EsphomeError
|
||||
raise EsphomeError(u"Error creating directories {}: {}".format(path, err))
|
||||
raise EsphomeError(f"Error creating directories {path}: {err}")
|
||||
|
||||
|
||||
def is_ip_address(host):
|
||||
@ -118,7 +113,7 @@ def _resolve_with_zeroconf(host):
|
||||
try:
|
||||
info = zc.resolve_host(host + '.')
|
||||
except Exception as err:
|
||||
raise EsphomeError("Error resolving mDNS hostname: {}".format(err))
|
||||
raise EsphomeError(f"Error resolving mDNS hostname: {err}")
|
||||
finally:
|
||||
zc.close()
|
||||
if info is None:
|
||||
@ -141,7 +136,7 @@ def resolve_ip_address(host):
|
||||
|
||||
try:
|
||||
return socket.gethostbyname(host)
|
||||
except socket.error as err:
|
||||
except OSError as err:
|
||||
errs.append(str(err))
|
||||
raise EsphomeError("Error resolving IP address: {}"
|
||||
"".format(', '.join(errs)))
|
||||
@ -167,10 +162,10 @@ def read_file(path):
|
||||
return f_handle.read()
|
||||
except OSError as err:
|
||||
from esphome.core import EsphomeError
|
||||
raise EsphomeError(u"Error reading file {}: {}".format(path, err))
|
||||
raise EsphomeError(f"Error reading file {path}: {err}")
|
||||
except UnicodeDecodeError as err:
|
||||
from esphome.core import EsphomeError
|
||||
raise EsphomeError(u"Error reading file {}: {}".format(path, err))
|
||||
raise EsphomeError(f"Error reading file {path}: {err}")
|
||||
|
||||
|
||||
def _write_file(path, text):
|
||||
@ -179,18 +174,15 @@ def _write_file(path, text):
|
||||
mkdir_p(directory)
|
||||
|
||||
tmp_path = None
|
||||
data = encode_text(text)
|
||||
data = text
|
||||
if isinstance(text, str):
|
||||
data = text.encode()
|
||||
try:
|
||||
with tempfile.NamedTemporaryFile(mode="wb", dir=directory, delete=False) as f_handle:
|
||||
tmp_path = f_handle.name
|
||||
f_handle.write(data)
|
||||
# Newer tempfile implementations create the file with mode 0o600
|
||||
os.chmod(tmp_path, 0o644)
|
||||
if IS_PY2:
|
||||
if os.path.exists(path):
|
||||
os.remove(path)
|
||||
os.rename(tmp_path, path)
|
||||
else:
|
||||
# If destination exists, will be overwritten
|
||||
os.replace(tmp_path, path)
|
||||
finally:
|
||||
@ -206,7 +198,7 @@ def write_file(path, text):
|
||||
_write_file(path, text)
|
||||
except OSError:
|
||||
from esphome.core import EsphomeError
|
||||
raise EsphomeError(u"Could not write file at {}".format(path))
|
||||
raise EsphomeError(f"Could not write file at {path}")
|
||||
|
||||
|
||||
def write_file_if_changed(path, text):
|
||||
@ -226,7 +218,7 @@ def copy_file_if_changed(src, dst):
|
||||
shutil.copy(src, dst)
|
||||
except OSError as err:
|
||||
from esphome.core import EsphomeError
|
||||
raise EsphomeError(u"Error copying file {} to {}: {}".format(src, dst, err))
|
||||
raise EsphomeError(f"Error copying file {src} to {dst}: {err}")
|
||||
|
||||
|
||||
def list_starts_with(list_, sub):
|
||||
@ -273,10 +265,6 @@ _TYPE_OVERLOADS = {
|
||||
list: type('EList', (list,), dict()),
|
||||
}
|
||||
|
||||
if IS_PY2:
|
||||
_TYPE_OVERLOADS[long] = type('long', (long,), dict())
|
||||
_TYPE_OVERLOADS[unicode] = type('unicode', (unicode,), dict())
|
||||
|
||||
# cache created classes here
|
||||
_CLASS_LOOKUP = {}
|
||||
|
||||
|
@ -1,4 +1,3 @@
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
|
||||
|
||||
|
@ -1,9 +1,6 @@
|
||||
from __future__ import print_function
|
||||
|
||||
from datetime import datetime
|
||||
import hashlib
|
||||
import logging
|
||||
import socket
|
||||
import ssl
|
||||
import sys
|
||||
import time
|
||||
@ -15,7 +12,6 @@ from esphome.const import CONF_BROKER, CONF_DISCOVERY_PREFIX, CONF_ESPHOME, \
|
||||
CONF_TOPIC, CONF_TOPIC_PREFIX, CONF_USERNAME
|
||||
from esphome.core import CORE, EsphomeError
|
||||
from esphome.helpers import color
|
||||
from esphome.py_compat import decode_text
|
||||
from esphome.util import safe_print
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@ -37,7 +33,7 @@ def initialize(config, subscriptions, on_message, username, password, client_id)
|
||||
if client.reconnect() == 0:
|
||||
_LOGGER.info("Successfully reconnected to the MQTT server")
|
||||
break
|
||||
except socket.error:
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
wait_time = min(2**tries, 300)
|
||||
@ -47,7 +43,7 @@ def initialize(config, subscriptions, on_message, username, password, client_id)
|
||||
time.sleep(wait_time)
|
||||
tries += 1
|
||||
|
||||
client = mqtt.Client(client_id or u'')
|
||||
client = mqtt.Client(client_id or '')
|
||||
client.on_connect = on_connect
|
||||
client.on_message = on_message
|
||||
client.on_disconnect = on_disconnect
|
||||
@ -70,8 +66,8 @@ def initialize(config, subscriptions, on_message, username, password, client_id)
|
||||
host = str(config[CONF_MQTT][CONF_BROKER])
|
||||
port = int(config[CONF_MQTT][CONF_PORT])
|
||||
client.connect(host, port)
|
||||
except socket.error as err:
|
||||
raise EsphomeError("Cannot connect to MQTT broker: {}".format(err))
|
||||
except OSError as err:
|
||||
raise EsphomeError(f"Cannot connect to MQTT broker: {err}")
|
||||
|
||||
try:
|
||||
client.loop_forever()
|
||||
@ -88,17 +84,17 @@ def show_logs(config, topic=None, username=None, password=None, client_id=None):
|
||||
if CONF_LOG_TOPIC in conf:
|
||||
topic = config[CONF_MQTT][CONF_LOG_TOPIC][CONF_TOPIC]
|
||||
elif CONF_TOPIC_PREFIX in config[CONF_MQTT]:
|
||||
topic = config[CONF_MQTT][CONF_TOPIC_PREFIX] + u'/debug'
|
||||
topic = config[CONF_MQTT][CONF_TOPIC_PREFIX] + '/debug'
|
||||
else:
|
||||
topic = config[CONF_ESPHOME][CONF_NAME] + u'/debug'
|
||||
topic = config[CONF_ESPHOME][CONF_NAME] + '/debug'
|
||||
else:
|
||||
_LOGGER.error(u"MQTT isn't setup, can't start MQTT logs")
|
||||
_LOGGER.error("MQTT isn't setup, can't start MQTT logs")
|
||||
return 1
|
||||
_LOGGER.info(u"Starting log output from %s", topic)
|
||||
_LOGGER.info("Starting log output from %s", topic)
|
||||
|
||||
def on_message(client, userdata, msg):
|
||||
time_ = datetime.now().time().strftime(u'[%H:%M:%S]')
|
||||
payload = decode_text(msg.payload)
|
||||
time_ = datetime.now().time().strftime('[%H:%M:%S]')
|
||||
payload = msg.payload.decode(errors='backslashreplace')
|
||||
message = time_ + payload
|
||||
safe_print(message)
|
||||
|
||||
@ -107,20 +103,20 @@ def show_logs(config, topic=None, username=None, password=None, client_id=None):
|
||||
|
||||
def clear_topic(config, topic, username=None, password=None, client_id=None):
|
||||
if topic is None:
|
||||
discovery_prefix = config[CONF_MQTT].get(CONF_DISCOVERY_PREFIX, u'homeassistant')
|
||||
discovery_prefix = config[CONF_MQTT].get(CONF_DISCOVERY_PREFIX, 'homeassistant')
|
||||
name = config[CONF_ESPHOME][CONF_NAME]
|
||||
topic = u'{}/+/{}/#'.format(discovery_prefix, name)
|
||||
_LOGGER.info(u"Clearing messages from '%s'", topic)
|
||||
_LOGGER.info(u"Please close this window when no more messages appear and the "
|
||||
u"MQTT topic has been cleared of retained messages.")
|
||||
topic = f'{discovery_prefix}/+/{name}/#'
|
||||
_LOGGER.info("Clearing messages from '%s'", topic)
|
||||
_LOGGER.info("Please close this window when no more messages appear and the "
|
||||
"MQTT topic has been cleared of retained messages.")
|
||||
|
||||
def on_message(client, userdata, msg):
|
||||
if not msg.payload or not msg.retain:
|
||||
return
|
||||
try:
|
||||
print(u"Clearing topic {}".format(msg.topic))
|
||||
print(f"Clearing topic {msg.topic}")
|
||||
except UnicodeDecodeError:
|
||||
print(u"Skipping non-UTF-8 topic (prohibited by MQTT standard)")
|
||||
print("Skipping non-UTF-8 topic (prohibited by MQTT standard)")
|
||||
return
|
||||
client.publish(msg.topic, None, retain=True)
|
||||
|
||||
@ -133,14 +129,14 @@ def get_fingerprint(config):
|
||||
_LOGGER.info("Getting fingerprint from %s:%s", addr[0], addr[1])
|
||||
try:
|
||||
cert_pem = ssl.get_server_certificate(addr)
|
||||
except IOError as err:
|
||||
except OSError as err:
|
||||
_LOGGER.error("Unable to connect to server: %s", err)
|
||||
return 1
|
||||
cert_der = ssl.PEM_cert_to_DER_cert(cert_pem)
|
||||
|
||||
sha1 = hashlib.sha1(cert_der).hexdigest()
|
||||
|
||||
safe_print(u"SHA1 Fingerprint: " + color('cyan', sha1))
|
||||
safe_print(u"Copy the string above into mqtt.ssl_fingerprints section of {}"
|
||||
u"".format(CORE.config_path))
|
||||
safe_print("SHA1 Fingerprint: " + color('cyan', sha1))
|
||||
safe_print("Copy the string above into mqtt.ssl_fingerprints section of {}"
|
||||
"".format(CORE.config_path))
|
||||
return 0
|
||||
|
@ -1,5 +1,3 @@
|
||||
from __future__ import division
|
||||
|
||||
import logging
|
||||
|
||||
import esphome.config_validation as cv
|
||||
@ -271,13 +269,13 @@ def _lookup_pin(value):
|
||||
return board_pins[value]
|
||||
if value in base_pins:
|
||||
return base_pins[value]
|
||||
raise cv.Invalid(u"Cannot resolve pin name '{}' for board {}.".format(value, CORE.board))
|
||||
raise cv.Invalid(f"Cannot resolve pin name '{value}' for board {CORE.board}.")
|
||||
|
||||
|
||||
def _translate_pin(value):
|
||||
if isinstance(value, dict) or value is None:
|
||||
raise cv.Invalid(u"This variable only supports pin numbers, not full pin schemas "
|
||||
u"(with inverted and mode).")
|
||||
raise cv.Invalid("This variable only supports pin numbers, not full pin schemas "
|
||||
"(with inverted and mode).")
|
||||
if isinstance(value, int):
|
||||
return value
|
||||
try:
|
||||
@ -301,27 +299,27 @@ def validate_gpio_pin(value):
|
||||
value = _translate_pin(value)
|
||||
if CORE.is_esp32:
|
||||
if value < 0 or value > 39:
|
||||
raise cv.Invalid(u"ESP32: Invalid pin number: {}".format(value))
|
||||
raise cv.Invalid(f"ESP32: Invalid pin number: {value}")
|
||||
if value in _ESP_SDIO_PINS:
|
||||
raise cv.Invalid("This pin cannot be used on ESP32s and is already used by "
|
||||
"the flash interface (function: {})".format(_ESP_SDIO_PINS[value]))
|
||||
if 9 <= value <= 10:
|
||||
_LOGGER.warning(u"ESP32: Pin %s (9-10) might already be used by the "
|
||||
u"flash interface in QUAD IO flash mode.", value)
|
||||
_LOGGER.warning("ESP32: Pin %s (9-10) might already be used by the "
|
||||
"flash interface in QUAD IO flash mode.", value)
|
||||
if value in (20, 24, 28, 29, 30, 31):
|
||||
# These pins are not exposed in GPIO mux (reason unknown)
|
||||
# but they're missing from IO_MUX list in datasheet
|
||||
raise cv.Invalid("The pin GPIO{} is not usable on ESP32s.".format(value))
|
||||
raise cv.Invalid(f"The pin GPIO{value} is not usable on ESP32s.")
|
||||
return value
|
||||
if CORE.is_esp8266:
|
||||
if value < 0 or value > 17:
|
||||
raise cv.Invalid(u"ESP8266: Invalid pin number: {}".format(value))
|
||||
raise cv.Invalid(f"ESP8266: Invalid pin number: {value}")
|
||||
if value in _ESP_SDIO_PINS:
|
||||
raise cv.Invalid("This pin cannot be used on ESP8266s and is already used by "
|
||||
"the flash interface (function: {})".format(_ESP_SDIO_PINS[value]))
|
||||
if 9 <= value <= 10:
|
||||
_LOGGER.warning(u"ESP8266: Pin %s (9-10) might already be used by the "
|
||||
u"flash interface in QUAD IO flash mode.", value)
|
||||
_LOGGER.warning("ESP8266: Pin %s (9-10) might already be used by the "
|
||||
"flash interface in QUAD IO flash mode.", value)
|
||||
return value
|
||||
raise NotImplementedError
|
||||
|
||||
@ -349,8 +347,8 @@ def output_pin(value):
|
||||
value = validate_gpio_pin(value)
|
||||
if CORE.is_esp32:
|
||||
if 34 <= value <= 39:
|
||||
raise cv.Invalid(u"ESP32: GPIO{} (34-39) can only be used as an "
|
||||
u"input pin.".format(value))
|
||||
raise cv.Invalid("ESP32: GPIO{} (34-39) can only be used as an "
|
||||
"input pin.".format(value))
|
||||
return value
|
||||
if CORE.is_esp8266:
|
||||
if value == 17:
|
||||
@ -364,11 +362,11 @@ def analog_pin(value):
|
||||
if CORE.is_esp32:
|
||||
if 32 <= value <= 39: # ADC1
|
||||
return value
|
||||
raise cv.Invalid(u"ESP32: Only pins 32 though 39 support ADC.")
|
||||
raise cv.Invalid("ESP32: Only pins 32 though 39 support ADC.")
|
||||
if CORE.is_esp8266:
|
||||
if value == 17: # A0
|
||||
return value
|
||||
raise cv.Invalid(u"ESP8266: Only pin A0 (GPIO17) supports ADC.")
|
||||
raise cv.Invalid("ESP8266: Only pin A0 (GPIO17) supports ADC.")
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
|
@ -1,5 +1,3 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
@ -7,7 +5,6 @@ import re
|
||||
import subprocess
|
||||
|
||||
from esphome.core import CORE
|
||||
from esphome.py_compat import decode_text
|
||||
from esphome.util import run_external_command, run_external_process
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@ -61,6 +58,7 @@ FILTER_PLATFORMIO_LINES = [
|
||||
r'Installing dependencies',
|
||||
r'.* @ .* is already installed',
|
||||
r'Building in .* mode',
|
||||
r'Advanced Memory Usage is available via .*',
|
||||
]
|
||||
|
||||
|
||||
@ -100,8 +98,7 @@ def run_upload(config, verbose, port):
|
||||
|
||||
def run_idedata(config):
|
||||
args = ['-t', 'idedata']
|
||||
stdout = run_platformio_cli_run(config, False, *args, capture_stdout=True)
|
||||
stdout = decode_text(stdout)
|
||||
stdout = run_platformio_cli_run(config, False, *args, capture_stdout=True).decode()
|
||||
match = re.search(r'{\s*".*}', stdout)
|
||||
if match is None:
|
||||
_LOGGER.debug("Could not match IDEData for %s", stdout)
|
||||
@ -172,7 +169,7 @@ def _decode_pc(config, addr):
|
||||
return
|
||||
command = [idedata.addr2line_path, '-pfiaC', '-e', idedata.firmware_elf_path, addr]
|
||||
try:
|
||||
translation = decode_text(subprocess.check_output(command)).strip()
|
||||
translation = subprocess.check_output(command).decode().strip()
|
||||
except Exception: # pylint: disable=broad-except
|
||||
_LOGGER.debug("Caught exception for command %s", command, exc_info=1)
|
||||
return
|
||||
@ -246,7 +243,7 @@ def process_stacktrace(config, line, backtrace_state):
|
||||
return backtrace_state
|
||||
|
||||
|
||||
class IDEData(object):
|
||||
class IDEData:
|
||||
def __init__(self, raw):
|
||||
if not isinstance(raw, dict):
|
||||
self.raw = {}
|
||||
|
@ -1,89 +0,0 @@
|
||||
import functools
|
||||
import sys
|
||||
import codecs
|
||||
|
||||
PYTHON_MAJOR = sys.version_info[0]
|
||||
IS_PY2 = PYTHON_MAJOR == 2
|
||||
IS_PY3 = PYTHON_MAJOR == 3
|
||||
|
||||
|
||||
# pylint: disable=no-else-return
|
||||
def safe_input(prompt=None):
|
||||
if IS_PY2:
|
||||
if prompt is None:
|
||||
return raw_input()
|
||||
return raw_input(prompt)
|
||||
else:
|
||||
if prompt is None:
|
||||
return input()
|
||||
return input(prompt)
|
||||
|
||||
|
||||
if IS_PY2:
|
||||
text_type = unicode
|
||||
string_types = (str, unicode)
|
||||
integer_types = (int, long)
|
||||
binary_type = str
|
||||
else:
|
||||
text_type = str
|
||||
string_types = (str,)
|
||||
integer_types = (int,)
|
||||
binary_type = bytes
|
||||
|
||||
|
||||
def byte_to_bytes(val): # type: (int) -> bytes
|
||||
if IS_PY2:
|
||||
return chr(val)
|
||||
else:
|
||||
return bytes([val])
|
||||
|
||||
|
||||
def char_to_byte(val): # type: (str) -> int
|
||||
if IS_PY2:
|
||||
if isinstance(val, string_types):
|
||||
return ord(val)
|
||||
elif isinstance(val, int):
|
||||
return val
|
||||
else:
|
||||
raise ValueError
|
||||
else:
|
||||
if isinstance(val, str):
|
||||
return ord(val)
|
||||
elif isinstance(val, int):
|
||||
return val
|
||||
else:
|
||||
raise ValueError
|
||||
|
||||
|
||||
def format_bytes(val):
|
||||
if IS_PY2:
|
||||
return ' '.join('{:02X}'.format(ord(x)) for x in val)
|
||||
else:
|
||||
return ' '.join('{:02X}'.format(x) for x in val)
|
||||
|
||||
|
||||
def sort_by_cmp(list_, cmp):
|
||||
if IS_PY2:
|
||||
list_.sort(cmp=cmp)
|
||||
else:
|
||||
list_.sort(key=functools.cmp_to_key(cmp))
|
||||
|
||||
|
||||
def indexbytes(buf, i):
|
||||
if IS_PY3:
|
||||
return buf[i]
|
||||
else:
|
||||
return ord(buf[i])
|
||||
|
||||
|
||||
def decode_text(data, encoding='utf-8', errors='strict'):
|
||||
if isinstance(data, text_type):
|
||||
return data
|
||||
return codecs.decode(data, encoding, errors)
|
||||
|
||||
|
||||
def encode_text(data, encoding='utf-8', errors='strict'):
|
||||
if isinstance(data, binary_type):
|
||||
return data
|
||||
|
||||
return codecs.encode(data, encoding, errors)

@ -13,17 +13,15 @@ from esphome.helpers import write_file_if_changed
from esphome.core import CoreType
from typing import Any, Optional, List

from esphome.py_compat import text_type

_LOGGER = logging.getLogger(__name__)


def storage_path(): # type: () -> str
return CORE.relative_config_path('.esphome', '{}.json'.format(CORE.config_filename))
return CORE.relative_config_path('.esphome', f'{CORE.config_filename}.json')


def ext_storage_path(base_path, config_filename): # type: (str, str) -> str
return os.path.join(base_path, '.esphome', '{}.json'.format(config_filename))
return os.path.join(base_path, '.esphome', f'{config_filename}.json')


def esphome_storage_path(base_path): # type: (str) -> str
@ -35,7 +33,7 @@ def trash_storage_path(base_path): # type: (str) -> str


# pylint: disable=too-many-instance-attributes
class StorageJSON(object):
class StorageJSON:
def __init__(self, storage_version, name, comment, esphome_version,
src_version, arduino_version, address, esp_platform, board, build_path,
firmware_bin_path, loaded_integrations):
@ -85,7 +83,7 @@ class StorageJSON(object):
}

def to_json(self):
return json.dumps(self.as_dict(), indent=2) + u'\n'
return json.dumps(self.as_dict(), indent=2) + '\n'

def save(self, path):
write_file_if_changed(path, self.to_json())
@ -156,7 +154,7 @@ class StorageJSON(object):
return isinstance(o, StorageJSON) and self.as_dict() == o.as_dict()


class EsphomeStorageJSON(object):
class EsphomeStorageJSON:
def __init__(self, storage_version, cookie_secret, last_update_check,
remote_version):
# Version of the storage JSON schema
@ -189,7 +187,7 @@ class EsphomeStorageJSON(object):
self.last_update_check_str = new.strftime("%Y-%m-%dT%H:%M:%S")

def to_json(self): # type: () -> dict
return json.dumps(self.as_dict(), indent=2) + u'\n'
return json.dumps(self.as_dict(), indent=2) + '\n'

def save(self, path): # type: (str) -> None
write_file_if_changed(path, self.to_json())
@ -216,7 +214,7 @@ class EsphomeStorageJSON(object):
def get_default(): # type: () -> EsphomeStorageJSON
return EsphomeStorageJSON(
storage_version=1,
cookie_secret=text_type(binascii.hexlify(os.urandom(64))),
cookie_secret=binascii.hexlify(os.urandom(64)).decode(),
last_update_check=None,
remote_version=None,
)
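The only behavioural change in get_default() is the explicit decode: binascii.hexlify() returns bytes on Python 3, and the cookie secret needs to be a plain str before it is written to JSON. Roughly:

import binascii
import os

secret = binascii.hexlify(os.urandom(64))  # bytes, e.g. b'3f9a...'
assert isinstance(secret, bytes)
cookie_secret = secret.decode()            # plain str, safe to store in JSON
assert isinstance(cookie_secret, str)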
|
@ -1,5 +1,3 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import collections
|
||||
import io
|
||||
import logging
|
||||
@ -9,12 +7,11 @@ import subprocess
|
||||
import sys
|
||||
|
||||
from esphome import const
|
||||
from esphome.py_compat import IS_PY2, decode_text, text_type
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RegistryEntry(object):
|
||||
class RegistryEntry:
|
||||
def __init__(self, name, fun, type_id, schema):
|
||||
self.name = name
|
||||
self.fun = fun
|
||||
@ -34,7 +31,7 @@ class RegistryEntry(object):
|
||||
|
||||
class Registry(dict):
|
||||
def __init__(self, base_schema=None, type_id_key=None):
|
||||
super(Registry, self).__init__()
|
||||
super().__init__()
|
||||
self.base_schema = base_schema or {}
|
||||
self.type_id_key = type_id_key
|
||||
|
||||
@ -81,17 +78,17 @@ def safe_print(message=""):
|
||||
|
||||
def shlex_quote(s):
|
||||
if not s:
|
||||
return u"''"
|
||||
return "''"
|
||||
if re.search(r'[^\w@%+=:,./-]', s) is None:
|
||||
return s
|
||||
|
||||
return u"'" + s.replace(u"'", u"'\"'\"'") + u"'"
|
||||
return "'" + s.replace("'", "'\"'\"'") + "'"
|
||||
|
||||
|
||||
ANSI_ESCAPE = re.compile(r'\033[@-_][0-?]*[ -/]*[@-~]')
|
||||
|
||||
|
||||
class RedirectText(object):
|
||||
class RedirectText:
|
||||
def __init__(self, out, filter_lines=None):
|
||||
self._out = out
|
||||
if filter_lines is None:
|
||||
@ -116,13 +113,12 @@ class RedirectText(object):
|
||||
self._out.write(s)
|
||||
|
||||
def write(self, s):
|
||||
# s is usually a text_type already (self._out is of type TextIOWrapper)
|
||||
# s is usually a str already (self._out is of type TextIOWrapper)
|
||||
# However, s is sometimes also a bytes object in python3. Let's make sure it's a
|
||||
# text_type
|
||||
# str
|
||||
# If the conversion fails, we will create an exception, which is okay because we won't
|
||||
# be able to print it anyway.
|
||||
text = decode_text(s)
|
||||
assert isinstance(text, text_type)
|
||||
text = s.decode()
|
||||
|
||||
if self._filter_pattern is not None:
|
||||
self._line_buffer += text
|
||||
@ -160,8 +156,8 @@ def run_external_command(func, *cmd, **kwargs):
|
||||
|
||||
orig_argv = sys.argv
|
||||
orig_exit = sys.exit # mock sys.exit
|
||||
full_cmd = u' '.join(shlex_quote(x) for x in cmd)
|
||||
_LOGGER.info(u"Running: %s", full_cmd)
|
||||
full_cmd = ' '.join(shlex_quote(x) for x in cmd)
|
||||
_LOGGER.info("Running: %s", full_cmd)
|
||||
|
||||
filter_lines = kwargs.get('filter_lines')
|
||||
orig_stdout = sys.stdout
|
||||
@ -182,8 +178,8 @@ def run_external_command(func, *cmd, **kwargs):
|
||||
except SystemExit as err:
|
||||
return err.args[0]
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
_LOGGER.error(u"Running command failed: %s", err)
|
||||
_LOGGER.error(u"Please try running %s locally.", full_cmd)
|
||||
_LOGGER.error("Running command failed: %s", err)
|
||||
_LOGGER.error("Please try running %s locally.", full_cmd)
|
||||
return 1
|
||||
finally:
|
||||
sys.argv = orig_argv
|
||||
@ -198,8 +194,8 @@ def run_external_command(func, *cmd, **kwargs):
|
||||
|
||||
|
||||
def run_external_process(*cmd, **kwargs):
|
||||
full_cmd = u' '.join(shlex_quote(x) for x in cmd)
|
||||
_LOGGER.info(u"Running: %s", full_cmd)
|
||||
full_cmd = ' '.join(shlex_quote(x) for x in cmd)
|
||||
_LOGGER.info("Running: %s", full_cmd)
|
||||
filter_lines = kwargs.get('filter_lines')
|
||||
|
||||
capture_stdout = kwargs.get('capture_stdout', False)
|
||||
@ -215,8 +211,8 @@ def run_external_process(*cmd, **kwargs):
|
||||
stdout=sub_stdout,
|
||||
stderr=sub_stderr)
|
||||
except Exception as err: # pylint: disable=broad-except
|
||||
_LOGGER.error(u"Running command failed: %s", err)
|
||||
_LOGGER.error(u"Please try running %s locally.", full_cmd)
|
||||
_LOGGER.error("Running command failed: %s", err)
|
||||
_LOGGER.error("Please try running %s locally.", full_cmd)
|
||||
return 1
|
||||
finally:
|
||||
if capture_stdout:
|
||||
@ -233,29 +229,6 @@ class OrderedDict(collections.OrderedDict):
|
||||
def __repr__(self):
|
||||
return dict(self).__repr__()
|
||||
|
||||
def move_to_end(self, key, last=True):
|
||||
if IS_PY2:
|
||||
if len(self) == 1:
|
||||
return
|
||||
if last:
|
||||
# When moving to end, just pop and re-add
|
||||
val = self.pop(key)
|
||||
self[key] = val
|
||||
else:
|
||||
# When moving to front, use internals here
|
||||
# https://stackoverflow.com/a/16664932
|
||||
root = self._OrderedDict__root # pylint: disable=no-member
|
||||
first = root[1]
|
||||
link = self._OrderedDict__map[key] # pylint: disable=no-member
|
||||
link_prev, link_next, _ = link
|
||||
link_prev[1] = link_next
|
||||
link_next[0] = link_prev
|
||||
link[0] = root
|
||||
link[1] = first
|
||||
root[1] = first[0] = link
|
||||
else:
|
||||
super(OrderedDict, self).move_to_end(key, last=last) # pylint: disable=no-member
|
||||
|
||||
|
||||
def list_yaml_files(folder):
|
||||
files = filter_yaml_files([os.path.join(folder, p) for p in os.listdir(folder)])
|
||||
|
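The whole Python 2 branch of move_to_end() above becomes dead code because collections.OrderedDict has shipped a native move_to_end() since Python 3.2, which is why the override can simply defer to super(). A short illustrative sketch, independent of the esphome.util subclass:

    from collections import OrderedDict

    d = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
    d.move_to_end('a')              # move 'a' to the end
    d.move_to_end('c', last=False)  # move 'c' to the front
    print(list(d))                  # ['c', 'b', 'a']
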
@ -3,8 +3,6 @@ import itertools
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from esphome.py_compat import string_types
|
||||
|
||||
|
||||
class ExtraKeysInvalid(vol.Invalid):
|
||||
def __init__(self, *arg, **kwargs):
|
||||
@ -22,14 +20,14 @@ def ensure_multiple_invalid(err):
|
||||
class _Schema(vol.Schema):
|
||||
"""Custom cv.Schema that prints similar keys on error."""
|
||||
def __init__(self, schema, required=False, extra=vol.PREVENT_EXTRA, extra_schemas=None):
|
||||
super(_Schema, self).__init__(schema, required=required, extra=extra)
|
||||
super().__init__(schema, required=required, extra=extra)
|
||||
# List of extra schemas to apply after validation
|
||||
# Should be used sparingly, as it's not a very voluptuous-way/clean way of
|
||||
# doing things.
|
||||
self._extra_schemas = extra_schemas or []
|
||||
|
||||
def __call__(self, data):
|
||||
res = super(_Schema, self).__call__(data)
|
||||
res = super().__call__(data)
|
||||
for extra in self._extra_schemas:
|
||||
try:
|
||||
res = extra(res)
|
||||
@ -51,10 +49,10 @@ class _Schema(vol.Schema):
|
||||
raise ValueError("All schema keys must be wrapped in cv.Required or cv.Optional")
|
||||
|
||||
# Keys that may be required
|
||||
all_required_keys = set(key for key in schema if isinstance(key, vol.Required))
|
||||
all_required_keys = {key for key in schema if isinstance(key, vol.Required)}
|
||||
|
||||
# Keys that may have defaults
|
||||
all_default_keys = set(key for key in schema if isinstance(key, vol.Optional))
|
||||
all_default_keys = {key for key in schema if isinstance(key, vol.Optional)}
|
||||
|
||||
# Recursively compile schema
|
||||
_compiled_schema = {}
|
||||
@ -84,9 +82,9 @@ class _Schema(vol.Schema):
|
||||
|
||||
key_names = []
|
||||
for skey in schema:
|
||||
if isinstance(skey, string_types):
|
||||
if isinstance(skey, str):
|
||||
key_names.append(skey)
|
||||
elif isinstance(skey, vol.Marker) and isinstance(skey.schema, string_types):
|
||||
elif isinstance(skey, vol.Marker) and isinstance(skey.schema, str):
|
||||
key_names.append(skey.schema)
|
||||
|
||||
def validate_mapping(path, iterable, out):
|
||||
@ -156,7 +154,7 @@ class _Schema(vol.Schema):
|
||||
if self.extra == vol.ALLOW_EXTRA:
|
||||
out[key] = value
|
||||
elif self.extra != vol.REMOVE_EXTRA:
|
||||
if isinstance(key, string_types) and key_names:
|
||||
if isinstance(key, str) and key_names:
|
||||
matches = difflib.get_close_matches(key, key_names)
|
||||
errors.append(ExtraKeysInvalid('extra keys not allowed', key_path,
|
||||
candidates=matches))
|
||||
@ -195,5 +193,5 @@ class _Schema(vol.Schema):
|
||||
schema = schemas[0]
|
||||
if isinstance(schema, vol.Schema):
|
||||
schema = schema.schema
|
||||
ret = super(_Schema, self).extend(schema, extra=extra)
|
||||
ret = super().extend(schema, extra=extra)
|
||||
return _Schema(ret.schema, extra=ret.extra, extra_schemas=self._extra_schemas)
|
||||
|
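Two idioms recur in the schema hunks above: set(generator) rewritten as a set comprehension, and the py_compat string_types tuple replaced by plain str now that unicode no longer exists. A tiny sketch of both, using a made-up schema dict rather than the real voluptuous markers:

    schema = {'name': str, 'board': str, 42: int}

    # set comprehension instead of set(<generator>)
    string_keys = {key for key in schema if isinstance(key, str)}
    print(string_keys)  # {'name', 'board'} (set order may vary)
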
@ -1,19 +1,17 @@
from __future__ import print_function

import json
import os

# pylint: disable=unused-import
from esphome.config import load_config, _format_vol_invalid, Config
from esphome.core import CORE, DocumentRange
from esphome.py_compat import text_type, safe_input
import esphome.config_validation as cv

# pylint: disable=unused-import, wrong-import-order
import voluptuous as vol
from typing import Optional


def _get_invalid_range(res, invalid):
    # type: (Config, vol.Invalid) -> Optional[DocumentRange]
    # type: (Config, cv.Invalid) -> Optional[DocumentRange]
    return res.get_deepest_document_range_for_path(invalid.path)


@ -30,7 +28,7 @@ def _dump_range(range):
    }


class VSCodeResult(object):
class VSCodeResult:
    def __init__(self):
        self.yaml_errors = []
        self.validation_errors = []
@ -57,7 +55,7 @@ class VSCodeResult(object):
def read_config(args):
    while True:
        CORE.reset()
        data = json.loads(safe_input())
        data = json.loads(input())
        assert data['type'] == 'validate'
        CORE.vscode = True
        CORE.ace = args.ace
@ -70,7 +68,7 @@ def read_config(args):
        try:
            res = load_config()
        except Exception as err:  # pylint: disable=broad-except
            vs.add_yaml_error(text_type(err))
            vs.add_yaml_error(str(err))
        else:
            for err in res.errors:
                try:
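The safe_input() wrapper papered over the Python 2/3 split between raw_input() and input(); on Python 3 the built-in input() already returns a plain str, so the wrapper (and text_type() on exceptions) can go. A minimal sketch of the same read-and-validate loop shape, with a hypothetical handle() function standing in for the real config validation:

    import json

    def handle(request):  # hypothetical stand-in for the real validation step
        return {'ok': request.get('type') == 'validate'}

    while True:
        try:
            line = input()  # input() returns str on Python 3; no safe_input() shim needed
        except EOFError:
            break
        print(json.dumps(handle(json.loads(line))))
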
@ -1,5 +1,3 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
@ -11,7 +9,6 @@ import esphome.config_validation as cv
|
||||
from esphome.helpers import color, get_bool_env, write_file
|
||||
# pylint: disable=anomalous-backslash-in-string
|
||||
from esphome.pins import ESP32_BOARD_PINS, ESP8266_BOARD_PINS
|
||||
from esphome.py_compat import safe_input, text_type
|
||||
from esphome.storage_json import StorageJSON, ext_storage_path
|
||||
from esphome.util import safe_print
|
||||
|
||||
@ -44,7 +41,7 @@ OTA_BIG = r""" ____ _______
|
||||
\____/ |_/_/ \_\\
|
||||
"""
|
||||
|
||||
BASE_CONFIG = u"""esphome:
|
||||
BASE_CONFIG = """esphome:
|
||||
name: {name}
|
||||
platform: {platform}
|
||||
board: {board}
|
||||
@ -75,7 +72,7 @@ def sanitize_double_quotes(value):
|
||||
def wizard_file(**kwargs):
|
||||
letters = string.ascii_letters + string.digits
|
||||
ap_name_base = kwargs['name'].replace('_', ' ').title()
|
||||
ap_name = "{} Fallback Hotspot".format(ap_name_base)
|
||||
ap_name = f"{ap_name_base} Fallback Hotspot"
|
||||
if len(ap_name) > 32:
|
||||
ap_name = ap_name_base
|
||||
kwargs['fallback_name'] = ap_name
|
||||
@ -84,9 +81,9 @@ def wizard_file(**kwargs):
|
||||
config = BASE_CONFIG.format(**kwargs)
|
||||
|
||||
if kwargs['password']:
|
||||
config += u' password: "{0}"\n\nota:\n password: "{0}"\n'.format(kwargs['password'])
|
||||
config += ' password: "{0}"\n\nota:\n password: "{0}"\n'.format(kwargs['password'])
|
||||
else:
|
||||
config += u"\nota:\n"
|
||||
config += "\nota:\n"
|
||||
|
||||
return config
|
||||
|
||||
@ -119,7 +116,7 @@ else:
|
||||
def safe_print_step(step, big):
|
||||
safe_print()
|
||||
safe_print()
|
||||
safe_print("============= STEP {} =============".format(step))
|
||||
safe_print(f"============= STEP {step} =============")
|
||||
safe_print(big)
|
||||
safe_print("===================================")
|
||||
sleep(0.25)
|
||||
@ -127,24 +124,24 @@ def safe_print_step(step, big):
|
||||
|
||||
def default_input(text, default):
|
||||
safe_print()
|
||||
safe_print(u"Press ENTER for default ({})".format(default))
|
||||
return safe_input(text.format(default)) or default
|
||||
safe_print(f"Press ENTER for default ({default})")
|
||||
return input(text.format(default)) or default
|
||||
|
||||
|
||||
# From https://stackoverflow.com/a/518232/8924614
|
||||
def strip_accents(value):
|
||||
return u''.join(c for c in unicodedata.normalize('NFD', text_type(value))
|
||||
return ''.join(c for c in unicodedata.normalize('NFD', str(value))
|
||||
if unicodedata.category(c) != 'Mn')
|
||||
|
||||
|
||||
def wizard(path):
|
||||
if not path.endswith('.yaml') and not path.endswith('.yml'):
|
||||
safe_print(u"Please make your configuration file {} have the extension .yaml or .yml"
|
||||
u"".format(color('cyan', path)))
|
||||
safe_print("Please make your configuration file {} have the extension .yaml or .yml"
|
||||
"".format(color('cyan', path)))
|
||||
return 1
|
||||
if os.path.exists(path):
|
||||
safe_print(u"Uh oh, it seems like {} already exists, please delete that file first "
|
||||
u"or chose another configuration file.".format(color('cyan', path)))
|
||||
safe_print("Uh oh, it seems like {} already exists, please delete that file first "
|
||||
"or chose another configuration file.".format(color('cyan', path)))
|
||||
return 1
|
||||
safe_print("Hi there!")
|
||||
sleep(1.5)
|
||||
@ -164,21 +161,21 @@ def wizard(path):
|
||||
color('bold_white', "livingroom")))
|
||||
safe_print()
|
||||
sleep(1)
|
||||
name = safe_input(color("bold_white", "(name): "))
|
||||
name = input(color("bold_white", "(name): "))
|
||||
while True:
|
||||
try:
|
||||
name = cv.valid_name(name)
|
||||
break
|
||||
except vol.Invalid:
|
||||
safe_print(color("red", u"Oh noes, \"{}\" isn't a valid name. Names can only include "
|
||||
u"numbers, letters and underscores.".format(name)))
|
||||
safe_print(color("red", "Oh noes, \"{}\" isn't a valid name. Names can only include "
|
||||
"numbers, letters and underscores.".format(name)))
|
||||
name = strip_accents(name).replace(' ', '_')
|
||||
name = u''.join(c for c in name if c in cv.ALLOWED_NAME_CHARS)
|
||||
safe_print(u"Shall I use \"{}\" as the name instead?".format(color('cyan', name)))
|
||||
name = ''.join(c for c in name if c in cv.ALLOWED_NAME_CHARS)
|
||||
safe_print("Shall I use \"{}\" as the name instead?".format(color('cyan', name)))
|
||||
sleep(0.5)
|
||||
name = default_input(u"(name [{}]): ", name)
|
||||
name = default_input("(name [{}]): ", name)
|
||||
|
||||
safe_print(u"Great! Your node is now called \"{}\".".format(color('cyan', name)))
|
||||
safe_print("Great! Your node is now called \"{}\".".format(color('cyan', name)))
|
||||
sleep(1)
|
||||
safe_print_step(2, ESP_BIG)
|
||||
safe_print("Now I'd like to know what microcontroller you're using so that I can compile "
|
||||
@ -189,14 +186,14 @@ def wizard(path):
|
||||
sleep(0.5)
|
||||
safe_print()
|
||||
safe_print("Please enter either ESP32 or ESP8266.")
|
||||
platform = safe_input(color("bold_white", "(ESP32/ESP8266): "))
|
||||
platform = input(color("bold_white", "(ESP32/ESP8266): "))
|
||||
try:
|
||||
platform = vol.All(vol.Upper, vol.Any('ESP32', 'ESP8266'))(platform)
|
||||
break
|
||||
except vol.Invalid:
|
||||
safe_print(u"Unfortunately, I can't find an espressif microcontroller called "
|
||||
u"\"{}\". Please try again.".format(platform))
|
||||
safe_print(u"Thanks! You've chosen {} as your platform.".format(color('cyan', platform)))
|
||||
safe_print("Unfortunately, I can't find an espressif microcontroller called "
|
||||
"\"{}\". Please try again.".format(platform))
|
||||
safe_print("Thanks! You've chosen {} as your platform.".format(color('cyan', platform)))
|
||||
safe_print()
|
||||
sleep(1)
|
||||
|
||||
@ -221,17 +218,17 @@ def wizard(path):
|
||||
safe_print("Options: {}".format(', '.join(sorted(boards))))
|
||||
|
||||
while True:
|
||||
board = safe_input(color("bold_white", "(board): "))
|
||||
board = input(color("bold_white", "(board): "))
|
||||
try:
|
||||
board = vol.All(vol.Lower, vol.Any(*boards))(board)
|
||||
break
|
||||
except vol.Invalid:
|
||||
safe_print(color('red', "Sorry, I don't think the board \"{}\" exists.".format(board)))
|
||||
safe_print(color('red', f"Sorry, I don't think the board \"{board}\" exists."))
|
||||
safe_print()
|
||||
sleep(0.25)
|
||||
safe_print()
|
||||
|
||||
safe_print(u"Way to go! You've chosen {} as your board.".format(color('cyan', board)))
|
||||
safe_print("Way to go! You've chosen {} as your board.".format(color('cyan', board)))
|
||||
safe_print()
|
||||
sleep(1)
|
||||
|
||||
@ -241,22 +238,22 @@ def wizard(path):
|
||||
safe_print()
|
||||
sleep(1)
|
||||
safe_print("First, what's the " + color('green', 'SSID') +
|
||||
u" (the name) of the WiFi network {} I should connect to?".format(name))
|
||||
f" (the name) of the WiFi network {name} I should connect to?")
|
||||
sleep(1.5)
|
||||
safe_print("For example \"{}\".".format(color('bold_white', "Abraham Linksys")))
|
||||
while True:
|
||||
ssid = safe_input(color('bold_white', "(ssid): "))
|
||||
ssid = input(color('bold_white', "(ssid): "))
|
||||
try:
|
||||
ssid = cv.ssid(ssid)
|
||||
break
|
||||
except vol.Invalid:
|
||||
safe_print(color('red', u"Unfortunately, \"{}\" doesn't seem to be a valid SSID. "
|
||||
u"Please try again.".format(ssid)))
|
||||
safe_print(color('red', "Unfortunately, \"{}\" doesn't seem to be a valid SSID. "
|
||||
"Please try again.".format(ssid)))
|
||||
safe_print()
|
||||
sleep(1)
|
||||
|
||||
safe_print(u"Thank you very much! You've just chosen \"{}\" as your SSID."
|
||||
u"".format(color('cyan', ssid)))
|
||||
safe_print("Thank you very much! You've just chosen \"{}\" as your SSID."
|
||||
"".format(color('cyan', ssid)))
|
||||
safe_print()
|
||||
sleep(0.75)
|
||||
|
||||
@ -265,7 +262,7 @@ def wizard(path):
|
||||
safe_print()
|
||||
safe_print("For example \"{}\"".format(color('bold_white', 'PASSWORD42')))
|
||||
sleep(0.5)
|
||||
psk = safe_input(color('bold_white', '(PSK): '))
|
||||
psk = input(color('bold_white', '(PSK): '))
|
||||
safe_print("Perfect! WiFi is now set up (you can create static IPs and so on later).")
|
||||
sleep(1.5)
|
||||
|
||||
@ -277,7 +274,7 @@ def wizard(path):
|
||||
safe_print()
|
||||
sleep(0.25)
|
||||
safe_print("Press ENTER for no password")
|
||||
password = safe_input(color('bold_white', '(password): '))
|
||||
password = input(color('bold_white', '(password): '))
|
||||
|
||||
wizard_write(path=path, name=name, platform=platform, board=board,
|
||||
ssid=ssid, psk=psk, password=password)
|
||||
|
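Most of the wizard changes above are mechanical: the u prefix is meaningless on Python 3, and many "...".format() calls become f-strings. A small sketch of the renaming pattern used for the fallback hotspot name (the values are illustrative, not ESPHome defaults):

    name = 'living_room'
    ap_name_base = name.replace('_', ' ').title()
    ap_name = f"{ap_name_base} Fallback Hotspot"
    if len(ap_name) > 32:           # SSIDs are limited to 32 characters
        ap_name = ap_name_base
    print(ap_name)                  # Living Room Fallback Hotspot
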
@ -1,5 +1,3 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
@ -14,19 +12,19 @@ from esphome.storage_json import StorageJSON, storage_path
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CPP_AUTO_GENERATE_BEGIN = u'// ========== AUTO GENERATED CODE BEGIN ==========='
|
||||
CPP_AUTO_GENERATE_END = u'// =========== AUTO GENERATED CODE END ============'
|
||||
CPP_INCLUDE_BEGIN = u'// ========== AUTO GENERATED INCLUDE BLOCK BEGIN ==========='
|
||||
CPP_INCLUDE_END = u'// ========== AUTO GENERATED INCLUDE BLOCK END ==========='
|
||||
INI_AUTO_GENERATE_BEGIN = u'; ========== AUTO GENERATED CODE BEGIN ==========='
|
||||
INI_AUTO_GENERATE_END = u'; =========== AUTO GENERATED CODE END ============'
|
||||
CPP_AUTO_GENERATE_BEGIN = '// ========== AUTO GENERATED CODE BEGIN ==========='
|
||||
CPP_AUTO_GENERATE_END = '// =========== AUTO GENERATED CODE END ============'
|
||||
CPP_INCLUDE_BEGIN = '// ========== AUTO GENERATED INCLUDE BLOCK BEGIN ==========='
|
||||
CPP_INCLUDE_END = '// ========== AUTO GENERATED INCLUDE BLOCK END ==========='
|
||||
INI_AUTO_GENERATE_BEGIN = '; ========== AUTO GENERATED CODE BEGIN ==========='
|
||||
INI_AUTO_GENERATE_END = '; =========== AUTO GENERATED CODE END ============'
|
||||
|
||||
CPP_BASE_FORMAT = (u"""// Auto generated code by esphome
|
||||
""", u""""
|
||||
CPP_BASE_FORMAT = ("""// Auto generated code by esphome
|
||||
""", """"
|
||||
|
||||
void setup() {
|
||||
// ===== DO NOT EDIT ANYTHING BELOW THIS LINE =====
|
||||
""", u"""
|
||||
""", """
|
||||
// ========= YOU CAN EDIT AFTER THIS LINE =========
|
||||
App.setup();
|
||||
}
|
||||
@ -36,7 +34,7 @@ void loop() {
|
||||
}
|
||||
""")
|
||||
|
||||
INI_BASE_FORMAT = (u"""; Auto generated code by esphome
|
||||
INI_BASE_FORMAT = ("""; Auto generated code by esphome
|
||||
|
||||
[common]
|
||||
lib_deps =
|
||||
@ -44,7 +42,7 @@ build_flags =
|
||||
upload_flags =
|
||||
|
||||
; ===== DO NOT EDIT ANYTHING BELOW THIS LINE =====
|
||||
""", u"""
|
||||
""", """
|
||||
; ========= YOU CAN EDIT AFTER THIS LINE =========
|
||||
|
||||
""")
|
||||
@ -62,8 +60,8 @@ def get_flags(key):
|
||||
|
||||
|
||||
def get_include_text():
|
||||
include_text = u'#include "esphome.h"\n' \
|
||||
u'using namespace esphome;\n'
|
||||
include_text = '#include "esphome.h"\n' \
|
||||
'using namespace esphome;\n'
|
||||
for _, component, conf in iter_components(CORE.config):
|
||||
if not hasattr(component, 'includes'):
|
||||
continue
|
||||
@ -106,7 +104,7 @@ def migrate_src_version_0_to_1():
|
||||
|
||||
if CPP_INCLUDE_BEGIN not in content:
|
||||
content, count = replace_file_content(content, r'#include "esphomelib/application.h"',
|
||||
CPP_INCLUDE_BEGIN + u'\n' + CPP_INCLUDE_END)
|
||||
CPP_INCLUDE_BEGIN + '\n' + CPP_INCLUDE_END)
|
||||
if count == 0:
|
||||
_LOGGER.error("Migration failed. ESPHome 1.10.0 needs to have a new auto-generated "
|
||||
"include section in the %s file. Please remove %s and let it be "
|
||||
@ -160,14 +158,14 @@ def update_storage_json():
|
||||
|
||||
|
||||
def format_ini(data):
|
||||
content = u''
|
||||
content = ''
|
||||
for key, value in sorted(data.items()):
|
||||
if isinstance(value, (list, set, tuple)):
|
||||
content += u'{} =\n'.format(key)
|
||||
content += f'{key} =\n'
|
||||
for x in value:
|
||||
content += u' {}\n'.format(x)
|
||||
content += f' {x}\n'
|
||||
else:
|
||||
content += u'{} = {}\n'.format(key, value)
|
||||
content += f'{key} = {value}\n'
|
||||
return content
|
||||
|
||||
|
||||
@ -216,7 +214,7 @@ def get_ini_content():
|
||||
# data['lib_ldf_mode'] = 'chain'
|
||||
data.update(CORE.config[CONF_ESPHOME].get(CONF_PLATFORMIO_OPTIONS, {}))
|
||||
|
||||
content = u'[env:{}]\n'.format(CORE.name)
|
||||
content = f'[env:{CORE.name}]\n'
|
||||
content += format_ini(data)
|
||||
|
||||
return content
|
||||
@ -225,18 +223,18 @@ def get_ini_content():
|
||||
def find_begin_end(text, begin_s, end_s):
|
||||
begin_index = text.find(begin_s)
|
||||
if begin_index == -1:
|
||||
raise EsphomeError(u"Could not find auto generated code begin in file, either "
|
||||
u"delete the main sketch file or insert the comment again.")
|
||||
raise EsphomeError("Could not find auto generated code begin in file, either "
|
||||
"delete the main sketch file or insert the comment again.")
|
||||
if text.find(begin_s, begin_index + 1) != -1:
|
||||
raise EsphomeError(u"Found multiple auto generate code begins, don't know "
|
||||
u"which to chose, please remove one of them.")
|
||||
raise EsphomeError("Found multiple auto generate code begins, don't know "
|
||||
"which to chose, please remove one of them.")
|
||||
end_index = text.find(end_s)
|
||||
if end_index == -1:
|
||||
raise EsphomeError(u"Could not find auto generated code end in file, either "
|
||||
u"delete the main sketch file or insert the comment again.")
|
||||
raise EsphomeError("Could not find auto generated code end in file, either "
|
||||
"delete the main sketch file or insert the comment again.")
|
||||
if text.find(end_s, end_index + 1) != -1:
|
||||
raise EsphomeError(u"Found multiple auto generate code endings, don't know "
|
||||
u"which to chose, please remove one of them.")
|
||||
raise EsphomeError("Found multiple auto generate code endings, don't know "
|
||||
"which to chose, please remove one of them.")
|
||||
|
||||
return text[:begin_index], text[(end_index + len(end_s)):]
|
||||
|
||||
@ -263,17 +261,17 @@ def write_platformio_project():
|
||||
write_platformio_ini(content)
|
||||
|
||||
|
||||
DEFINES_H_FORMAT = ESPHOME_H_FORMAT = u"""\
|
||||
DEFINES_H_FORMAT = ESPHOME_H_FORMAT = """\
|
||||
#pragma once
|
||||
{}
|
||||
"""
|
||||
VERSION_H_FORMAT = u"""\
|
||||
VERSION_H_FORMAT = """\
|
||||
#pragma once
|
||||
#define ESPHOME_VERSION "{}"
|
||||
"""
|
||||
DEFINES_H_TARGET = 'esphome/core/defines.h'
|
||||
VERSION_H_TARGET = 'esphome/core/version.h'
|
||||
ESPHOME_README_TXT = u"""
|
||||
ESPHOME_README_TXT = """
|
||||
THIS DIRECTORY IS AUTO-GENERATED, DO NOT MODIFY
|
||||
|
||||
ESPHome automatically populates the esphome/ directory, and any
|
||||
@ -298,9 +296,9 @@ def copy_src_tree():
|
||||
include_l = []
|
||||
for target, path in source_files_l:
|
||||
if os.path.splitext(path)[1] in HEADER_FILE_EXTENSIONS:
|
||||
include_l.append(u'#include "{}"'.format(target))
|
||||
include_l.append(u'')
|
||||
include_s = u'\n'.join(include_l)
|
||||
include_l.append(f'#include "{target}"')
|
||||
include_l.append('')
|
||||
include_s = '\n'.join(include_l)
|
||||
|
||||
source_files_copy = source_files.copy()
|
||||
source_files_copy.pop(DEFINES_H_TARGET)
|
||||
@ -340,7 +338,7 @@ def copy_src_tree():
|
||||
def generate_defines_h():
|
||||
define_content_l = [x.as_macro for x in CORE.defines]
|
||||
define_content_l.sort()
|
||||
return DEFINES_H_FORMAT.format(u'\n'.join(define_content_l))
|
||||
return DEFINES_H_FORMAT.format('\n'.join(define_content_l))
|
||||
|
||||
|
||||
def write_cpp(code_s):
|
||||
@ -354,11 +352,11 @@ def write_cpp(code_s):
|
||||
code_format = CPP_BASE_FORMAT
|
||||
|
||||
copy_src_tree()
|
||||
global_s = u'#include "esphome.h"\n'
|
||||
global_s = '#include "esphome.h"\n'
|
||||
global_s += CORE.cpp_global_section
|
||||
|
||||
full_file = code_format[0] + CPP_INCLUDE_BEGIN + u'\n' + global_s + CPP_INCLUDE_END
|
||||
full_file += code_format[1] + CPP_AUTO_GENERATE_BEGIN + u'\n' + code_s + CPP_AUTO_GENERATE_END
|
||||
full_file = code_format[0] + CPP_INCLUDE_BEGIN + '\n' + global_s + CPP_INCLUDE_END
|
||||
full_file += code_format[1] + CPP_AUTO_GENERATE_BEGIN + '\n' + code_s + CPP_AUTO_GENERATE_END
|
||||
full_file += code_format[2]
|
||||
write_file_if_changed(path, full_file)
|
||||
|
||||
|
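The format_ini() rewrite above follows the same pattern: u'{} = {}\n'.format(...) collapses into f-strings. A standalone, simplified sketch of the helper's behaviour (not the full platformio.ini generation):

    def format_ini(data):
        content = ''
        for key, value in sorted(data.items()):
            if isinstance(value, (list, set, tuple)):
                content += f'{key} =\n'
                content += ''.join(f'  {x}\n' for x in value)
            else:
                content += f'{key} = {value}\n'
        return content

    # Example (hypothetical option values):
    print(format_ini({'board': 'nodemcuv2', 'lib_deps': ['ArduinoJson', 'AsyncTCP']}))
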
@ -1,5 +1,3 @@
|
||||
from __future__ import print_function
|
||||
|
||||
import fnmatch
|
||||
import functools
|
||||
import inspect
|
||||
@ -15,7 +13,6 @@ from esphome import core
|
||||
from esphome.config_helpers import read_config_file
|
||||
from esphome.core import EsphomeError, IPAddress, Lambda, MACAddress, TimePeriod, DocumentRange
|
||||
from esphome.helpers import add_class_to_obj
|
||||
from esphome.py_compat import text_type, IS_PY2
|
||||
from esphome.util import OrderedDict, filter_yaml_files
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@ -23,12 +20,12 @@ _LOGGER = logging.getLogger(__name__)
|
||||
# Mostly copied from Home Assistant because that code works fine and
|
||||
# let's not reinvent the wheel here
|
||||
|
||||
SECRET_YAML = u'secrets.yaml'
|
||||
SECRET_YAML = 'secrets.yaml'
|
||||
_SECRET_CACHE = {}
|
||||
_SECRET_VALUES = {}
|
||||
|
||||
|
||||
class ESPHomeDataBase(object):
|
||||
class ESPHomeDataBase:
|
||||
@property
|
||||
def esp_range(self):
|
||||
return getattr(self, '_esp_range', None)
|
||||
@ -38,7 +35,7 @@ class ESPHomeDataBase(object):
|
||||
self._esp_range = DocumentRange.from_marks(node.start_mark, node.end_mark)
|
||||
|
||||
|
||||
class ESPForceValue(object):
|
||||
class ESPForceValue:
|
||||
pass
|
||||
|
||||
|
||||
@ -74,27 +71,27 @@ class ESPHomeLoader(yaml.SafeLoader): # pylint: disable=too-many-ancestors
|
||||
|
||||
@_add_data_ref
|
||||
def construct_yaml_int(self, node):
|
||||
return super(ESPHomeLoader, self).construct_yaml_int(node)
|
||||
return super().construct_yaml_int(node)
|
||||
|
||||
@_add_data_ref
|
||||
def construct_yaml_float(self, node):
|
||||
return super(ESPHomeLoader, self).construct_yaml_float(node)
|
||||
return super().construct_yaml_float(node)
|
||||
|
||||
@_add_data_ref
|
||||
def construct_yaml_binary(self, node):
|
||||
return super(ESPHomeLoader, self).construct_yaml_binary(node)
|
||||
return super().construct_yaml_binary(node)
|
||||
|
||||
@_add_data_ref
|
||||
def construct_yaml_omap(self, node):
|
||||
return super(ESPHomeLoader, self).construct_yaml_omap(node)
|
||||
return super().construct_yaml_omap(node)
|
||||
|
||||
@_add_data_ref
|
||||
def construct_yaml_str(self, node):
|
||||
return super(ESPHomeLoader, self).construct_yaml_str(node)
|
||||
return super().construct_yaml_str(node)
|
||||
|
||||
@_add_data_ref
|
||||
def construct_yaml_seq(self, node):
|
||||
return super(ESPHomeLoader, self).construct_yaml_seq(node)
|
||||
return super().construct_yaml_seq(node)
|
||||
|
||||
@_add_data_ref
|
||||
def construct_yaml_map(self, node):
|
||||
@ -130,12 +127,12 @@ class ESPHomeLoader(yaml.SafeLoader): # pylint: disable=too-many-ancestors
|
||||
hash(key)
|
||||
except TypeError:
|
||||
raise yaml.constructor.ConstructorError(
|
||||
'Invalid key "{}" (not hashable)'.format(key), key_node.start_mark)
|
||||
f'Invalid key "{key}" (not hashable)', key_node.start_mark)
|
||||
|
||||
# Check if it is a duplicate key
|
||||
if key in seen_keys:
|
||||
raise yaml.constructor.ConstructorError(
|
||||
'Duplicate key "{}"'.format(key), key_node.start_mark,
|
||||
f'Duplicate key "{key}"', key_node.start_mark,
|
||||
'NOTE: Previous declaration here:', seen_keys[key],
|
||||
)
|
||||
seen_keys[key] = key_node.start_mark
|
||||
@ -194,11 +191,11 @@ class ESPHomeLoader(yaml.SafeLoader): # pylint: disable=too-many-ancestors
|
||||
args = node.value.split()
|
||||
# Check for a default value
|
||||
if len(args) > 1:
|
||||
return os.getenv(args[0], u' '.join(args[1:]))
|
||||
return os.getenv(args[0], ' '.join(args[1:]))
|
||||
if args[0] in os.environ:
|
||||
return os.environ[args[0]]
|
||||
raise yaml.MarkedYAMLError(
|
||||
u"Environment variable '{}' not defined".format(node.value), node.start_mark
|
||||
f"Environment variable '{node.value}' not defined", node.start_mark
|
||||
)
|
||||
|
||||
@property
|
||||
@ -213,10 +210,10 @@ class ESPHomeLoader(yaml.SafeLoader): # pylint: disable=too-many-ancestors
|
||||
secrets = _load_yaml_internal(self._rel_path(SECRET_YAML))
|
||||
if node.value not in secrets:
|
||||
raise yaml.MarkedYAMLError(
|
||||
u"Secret '{}' not defined".format(node.value), node.start_mark
|
||||
f"Secret '{node.value}' not defined", node.start_mark
|
||||
)
|
||||
val = secrets[node.value]
|
||||
_SECRET_VALUES[text_type(val)] = node.value
|
||||
_SECRET_VALUES[str(val)] = node.value
|
||||
return val
|
||||
|
||||
@_add_data_ref
|
||||
@ -259,7 +256,7 @@ class ESPHomeLoader(yaml.SafeLoader): # pylint: disable=too-many-ancestors
|
||||
|
||||
@_add_data_ref
|
||||
def construct_lambda(self, node):
|
||||
return Lambda(text_type(node.value))
|
||||
return Lambda(str(node.value))
|
||||
|
||||
@_add_data_ref
|
||||
def construct_force(self, node):
|
||||
@ -267,13 +264,13 @@ class ESPHomeLoader(yaml.SafeLoader): # pylint: disable=too-many-ancestors
|
||||
return add_class_to_obj(obj, ESPForceValue)
|
||||
|
||||
|
||||
ESPHomeLoader.add_constructor(u'tag:yaml.org,2002:int', ESPHomeLoader.construct_yaml_int)
|
||||
ESPHomeLoader.add_constructor(u'tag:yaml.org,2002:float', ESPHomeLoader.construct_yaml_float)
|
||||
ESPHomeLoader.add_constructor(u'tag:yaml.org,2002:binary', ESPHomeLoader.construct_yaml_binary)
|
||||
ESPHomeLoader.add_constructor(u'tag:yaml.org,2002:omap', ESPHomeLoader.construct_yaml_omap)
|
||||
ESPHomeLoader.add_constructor(u'tag:yaml.org,2002:str', ESPHomeLoader.construct_yaml_str)
|
||||
ESPHomeLoader.add_constructor(u'tag:yaml.org,2002:seq', ESPHomeLoader.construct_yaml_seq)
|
||||
ESPHomeLoader.add_constructor(u'tag:yaml.org,2002:map', ESPHomeLoader.construct_yaml_map)
|
||||
ESPHomeLoader.add_constructor('tag:yaml.org,2002:int', ESPHomeLoader.construct_yaml_int)
|
||||
ESPHomeLoader.add_constructor('tag:yaml.org,2002:float', ESPHomeLoader.construct_yaml_float)
|
||||
ESPHomeLoader.add_constructor('tag:yaml.org,2002:binary', ESPHomeLoader.construct_yaml_binary)
|
||||
ESPHomeLoader.add_constructor('tag:yaml.org,2002:omap', ESPHomeLoader.construct_yaml_omap)
|
||||
ESPHomeLoader.add_constructor('tag:yaml.org,2002:str', ESPHomeLoader.construct_yaml_str)
|
||||
ESPHomeLoader.add_constructor('tag:yaml.org,2002:seq', ESPHomeLoader.construct_yaml_seq)
|
||||
ESPHomeLoader.add_constructor('tag:yaml.org,2002:map', ESPHomeLoader.construct_yaml_map)
|
||||
ESPHomeLoader.add_constructor('!env_var', ESPHomeLoader.construct_env_var)
|
||||
ESPHomeLoader.add_constructor('!secret', ESPHomeLoader.construct_secret)
|
||||
ESPHomeLoader.add_constructor('!include', ESPHomeLoader.construct_include)
|
||||
@ -313,7 +310,7 @@ def dump(dict_):
|
||||
|
||||
def _is_file_valid(name):
|
||||
"""Decide if a file is valid."""
|
||||
return not name.startswith(u'.')
|
||||
return not name.startswith('.')
|
||||
|
||||
|
||||
def _find_files(directory, pattern):
|
||||
@ -328,7 +325,7 @@ def _find_files(directory, pattern):
|
||||
|
||||
def is_secret(value):
|
||||
try:
|
||||
return _SECRET_VALUES[text_type(value)]
|
||||
return _SECRET_VALUES[str(value)]
|
||||
except (KeyError, ValueError):
|
||||
return None
|
||||
|
||||
@ -358,31 +355,31 @@ class ESPHomeDumper(yaml.SafeDumper): # pylint: disable=too-many-ancestors
|
||||
return node
|
||||
|
||||
def represent_secret(self, value):
|
||||
return self.represent_scalar(tag=u'!secret', value=_SECRET_VALUES[text_type(value)])
|
||||
return self.represent_scalar(tag='!secret', value=_SECRET_VALUES[str(value)])
|
||||
|
||||
def represent_stringify(self, value):
|
||||
if is_secret(value):
|
||||
return self.represent_secret(value)
|
||||
return self.represent_scalar(tag=u'tag:yaml.org,2002:str', value=text_type(value))
|
||||
return self.represent_scalar(tag='tag:yaml.org,2002:str', value=str(value))
|
||||
|
||||
# pylint: disable=arguments-differ
|
||||
def represent_bool(self, value):
|
||||
return self.represent_scalar(u'tag:yaml.org,2002:bool', u'true' if value else u'false')
|
||||
return self.represent_scalar('tag:yaml.org,2002:bool', 'true' if value else 'false')
|
||||
|
||||
def represent_int(self, value):
|
||||
if is_secret(value):
|
||||
return self.represent_secret(value)
|
||||
return self.represent_scalar(tag=u'tag:yaml.org,2002:int', value=text_type(value))
|
||||
return self.represent_scalar(tag='tag:yaml.org,2002:int', value=str(value))
|
||||
|
||||
def represent_float(self, value):
|
||||
if is_secret(value):
|
||||
return self.represent_secret(value)
|
||||
if math.isnan(value):
|
||||
value = u'.nan'
|
||||
value = '.nan'
|
||||
elif math.isinf(value):
|
||||
value = u'.inf' if value > 0 else u'-.inf'
|
||||
value = '.inf' if value > 0 else '-.inf'
|
||||
else:
|
||||
value = text_type(repr(value)).lower()
|
||||
value = str(repr(value)).lower()
|
||||
# Note that in some cases `repr(data)` represents a float number
|
||||
# without the decimal parts. For instance:
|
||||
# >>> repr(1e17)
|
||||
@ -390,9 +387,9 @@ class ESPHomeDumper(yaml.SafeDumper): # pylint: disable=too-many-ancestors
|
||||
# Unfortunately, this is not a valid float representation according
|
||||
# to the definition of the `!!float` tag. We fix this by adding
|
||||
# '.0' before the 'e' symbol.
|
||||
if u'.' not in value and u'e' in value:
|
||||
value = value.replace(u'e', u'.0e', 1)
|
||||
return self.represent_scalar(tag=u'tag:yaml.org,2002:float', value=value)
|
||||
if '.' not in value and 'e' in value:
|
||||
value = value.replace('e', '.0e', 1)
|
||||
return self.represent_scalar(tag='tag:yaml.org,2002:float', value=value)
|
||||
|
||||
def represent_lambda(self, value):
|
||||
if is_secret(value.value):
|
||||
@ -417,9 +414,6 @@ ESPHomeDumper.add_multi_representer(bool, ESPHomeDumper.represent_bool)
|
||||
ESPHomeDumper.add_multi_representer(str, ESPHomeDumper.represent_stringify)
|
||||
ESPHomeDumper.add_multi_representer(int, ESPHomeDumper.represent_int)
|
||||
ESPHomeDumper.add_multi_representer(float, ESPHomeDumper.represent_float)
|
||||
if IS_PY2:
|
||||
ESPHomeDumper.add_multi_representer(unicode, ESPHomeDumper.represent_stringify)
|
||||
ESPHomeDumper.add_multi_representer(long, ESPHomeDumper.represent_int)
|
||||
ESPHomeDumper.add_multi_representer(IPAddress, ESPHomeDumper.represent_stringify)
|
||||
ESPHomeDumper.add_multi_representer(MACAddress, ESPHomeDumper.represent_stringify)
|
||||
ESPHomeDumper.add_multi_representer(TimePeriod, ESPHomeDumper.represent_stringify)
|
||||
|
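In the YAML dumper above, text_type() simply becomes str() and the tag literals lose their u prefix; the PyYAML API itself is unchanged. A self-contained sketch of the same represent_scalar() pattern, using a throwaway Point class rather than ESPHome's core types:

    import yaml

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    def represent_point(dumper, value):
        # str()/f-strings are sufficient on Python 3; text_type was a Python 2 artifact
        return dumper.represent_scalar('tag:yaml.org,2002:str', f'{value.x},{value.y}')

    yaml.SafeDumper.add_representer(Point, represent_point)
    print(yaml.dump({'p': Point(1, 2)}, Dumper=yaml.SafeDumper))  # p: 1,2
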
@ -12,8 +12,6 @@ import time
|
||||
|
||||
import ifaddr
|
||||
|
||||
from esphome.py_compat import indexbytes, text_type
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Some timing constants
|
||||
@ -83,7 +81,7 @@ class IncomingDecodeError(Error):
|
||||
|
||||
|
||||
# pylint: disable=no-init
|
||||
class QuietLogger(object):
|
||||
class QuietLogger:
|
||||
_seen_logs = {}
|
||||
|
||||
@classmethod
|
||||
@ -112,7 +110,7 @@ class QuietLogger(object):
|
||||
logger(*args)
|
||||
|
||||
|
||||
class DNSEntry(object):
|
||||
class DNSEntry:
|
||||
"""A DNS entry"""
|
||||
|
||||
def __init__(self, name, type_, class_):
|
||||
@ -281,7 +279,7 @@ class DNSIncoming(QuietLogger):
|
||||
|
||||
def read_utf(self, offset, length):
|
||||
"""Reads a UTF-8 string of a given length from the packet"""
|
||||
return text_type(self.data[offset:offset + length], 'utf-8', 'replace')
|
||||
return str(self.data[offset:offset + length], 'utf-8', 'replace')
|
||||
|
||||
def read_name(self):
|
||||
"""Reads a domain name from the packet"""
|
||||
@ -291,7 +289,7 @@ class DNSIncoming(QuietLogger):
|
||||
first = off
|
||||
|
||||
while True:
|
||||
length = indexbytes(self.data, off)
|
||||
length = self.data[off]
|
||||
off += 1
|
||||
if length == 0:
|
||||
break
|
||||
@ -302,13 +300,13 @@ class DNSIncoming(QuietLogger):
|
||||
elif t == 0xC0:
|
||||
if next_ < 0:
|
||||
next_ = off + 1
|
||||
off = ((length & 0x3F) << 8) | indexbytes(self.data, off)
|
||||
off = ((length & 0x3F) << 8) | self.data[off]
|
||||
if off >= first:
|
||||
raise IncomingDecodeError(
|
||||
"Bad domain name (circular) at %s" % (off,))
|
||||
f"Bad domain name (circular) at {off}")
|
||||
first = off
|
||||
else:
|
||||
raise IncomingDecodeError("Bad domain name at %s" % (off,))
|
||||
raise IncomingDecodeError(f"Bad domain name at {off}")
|
||||
|
||||
if next_ >= 0:
|
||||
self.offset = next_
|
||||
@ -318,7 +316,7 @@ class DNSIncoming(QuietLogger):
|
||||
return result
|
||||
|
||||
|
||||
class DNSOutgoing(object):
|
||||
class DNSOutgoing:
|
||||
"""Object representation of an outgoing packet"""
|
||||
|
||||
def __init__(self, flags):
|
||||
@ -461,7 +459,7 @@ class Engine(threading.Thread):
|
||||
if reader:
|
||||
reader.handle_read(socket_)
|
||||
|
||||
except (select.error, socket.error) as e:
|
||||
except OSError as e:
|
||||
# If the socket was closed by another thread, during
|
||||
# shutdown, ignore it and exit
|
||||
if e.args[0] != socket.EBADF or not self.zc.done:
|
||||
@ -500,7 +498,7 @@ class Listener(QuietLogger):
|
||||
self.zc.handle_response(msg)
|
||||
|
||||
|
||||
class RecordUpdateListener(object):
|
||||
class RecordUpdateListener:
|
||||
def update_record(self, zc, now, record):
|
||||
raise NotImplementedError()
|
||||
|
||||
@ -578,7 +576,7 @@ class DashboardStatus(RecordUpdateListener, threading.Thread):
|
||||
self.on_update({key: self.host_status(key) for key in self.key_to_host})
|
||||
|
||||
def request_query(self, hosts):
|
||||
self.query_hosts = set(host for host in hosts.values())
|
||||
self.query_hosts = set(hosts.values())
|
||||
self.key_to_host = hosts
|
||||
self.query_event.set()
|
||||
|
||||
@ -605,12 +603,12 @@ class DashboardStatus(RecordUpdateListener, threading.Thread):
|
||||
|
||||
|
||||
def get_all_addresses():
|
||||
return list(set(
|
||||
return list({
|
||||
addr.ip
|
||||
for iface in ifaddr.get_adapters()
|
||||
for addr in iface.ips
|
||||
if addr.is_IPv4 and addr.network_prefix != 32 # Host only netmask 255.255.255.255
|
||||
))
|
||||
})
|
||||
|
||||
|
||||
def new_socket():
|
||||
@ -631,7 +629,7 @@ def new_socket():
|
||||
else:
|
||||
try:
|
||||
s.setsockopt(socket.SOL_SOCKET, reuseport, 1)
|
||||
except (OSError, socket.error) as err:
|
||||
except OSError as err:
|
||||
# OSError on python 3, socket.error on python 2
|
||||
if err.errno != errno.ENOPROTOOPT:
|
||||
raise
|
||||
@ -662,7 +660,7 @@ class Zeroconf(QuietLogger):
|
||||
_value = socket.inet_aton(_MDNS_ADDR) + socket.inet_aton(i)
|
||||
self._listen_socket.setsockopt(
|
||||
socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, _value)
|
||||
except socket.error as e:
|
||||
except OSError as e:
|
||||
_errno = e.args[0]
|
||||
if _errno == errno.EADDRINUSE:
|
||||
log.info(
|
||||
|
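The indexbytes() helper disappears from the zeroconf code above because indexing a bytes object already yields an int on Python 3 (on Python 2 it yielded a one-character str). A quick illustration of why self.data[off] is enough now:

    data = b'\x03foo\x00'
    length = data[0]            # int 3 on Python 3; b'\x03' on Python 2
    label = data[1:1 + length]
    print(length, label)        # 3 b'foo'
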
pylintrc
@ -25,9 +25,3 @@ disable=
    stop-iteration-return,
    no-self-use,
    import-outside-toplevel,


additional-builtins=
    unicode,
    long,
    raw_input
@ -3,7 +3,6 @@ PyYAML==5.2
paho-mqtt==1.5.0
colorlog==4.0.2
tornado==5.1.1
typing>=3.6.6;python_version<"3.5"
protobuf==3.11.1
tzlocal==2.0.0
pytz==2019.3

@ -3,7 +3,6 @@ PyYAML==5.2
paho-mqtt==1.5.0
colorlog==4.0.2
tornado==5.1.1
typing>=3.6.6;python_version<"3.5"
protobuf==3.11.1
tzlocal==2.0.0
pytz==2019.3
@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: api_options.proto
@ -39,12 +39,12 @@ content = prot.read_bytes()
d = descriptor.FileDescriptorSet.FromString(content)


def indent_list(text, padding=u' '):
def indent_list(text, padding=' '):
    return [padding + line for line in text.splitlines()]


def indent(text, padding=u' '):
    return u'\n'.join(indent_list(text, padding))
def indent(text, padding=' '):
    return '\n'.join(indent_list(text, padding))


def camel_to_snake(name):
@ -432,7 +432,7 @@ class SInt64Type(TypeInfo):

class RepeatedTypeInfo(TypeInfo):
    def __init__(self, field):
        super(RepeatedTypeInfo, self).__init__(field)
        super().__init__(field)
        self._ti = TYPE_INFO[field.type](field)

    @property
@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
import sys
import os.path

@ -1,5 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
from __future__ import print_function
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import codecs
|
||||
import collections
|
||||
@ -105,7 +104,7 @@ def lint_re_check(regex, **kwargs):
|
||||
err = func(fname, match)
|
||||
if err is None:
|
||||
continue
|
||||
errors.append("{} See line {}.".format(err, lineno))
|
||||
errors.append(f"{err} See line {lineno}.")
|
||||
return errors
|
||||
return decor(new_func)
|
||||
return decorator
|
||||
@ -134,7 +133,7 @@ def lint_ino(fname):
|
||||
return "This file extension (.ino) is not allowed. Please use either .cpp or .h"
|
||||
|
||||
|
||||
@lint_file_check(exclude=['*{}'.format(f) for f in file_types] + [
|
||||
@lint_file_check(exclude=[f'*{f}' for f in file_types] + [
|
||||
'.clang-*', '.dockerignore', '.editorconfig', '*.gitignore', 'LICENSE', 'pylintrc',
|
||||
'MANIFEST.in', 'docker/Dockerfile*', 'docker/rootfs/*', 'script/*',
|
||||
])
|
||||
@ -177,7 +176,7 @@ CPP_RE_EOL = r'\s*?(?://.*?)?$'
|
||||
|
||||
|
||||
def highlight(s):
|
||||
return '\033[36m{}\033[0m'.format(s)
|
||||
return f'\033[36m{s}\033[0m'
|
||||
|
||||
|
||||
@lint_re_check(r'^#define\s+([a-zA-Z0-9_]+)\s+([0-9bx]+)' + CPP_RE_EOL,
|
||||
@ -268,7 +267,7 @@ def lint_constants_usage():
|
||||
def relative_cpp_search_text(fname, content):
|
||||
parts = fname.split('/')
|
||||
integration = parts[2]
|
||||
return '#include "esphome/components/{}'.format(integration)
|
||||
return f'#include "esphome/components/{integration}'
|
||||
|
||||
|
||||
@lint_content_find_check(relative_cpp_search_text, include=['esphome/components/*.cpp'])
|
||||
@ -284,7 +283,7 @@ def lint_relative_cpp_import(fname):
|
||||
def relative_py_search_text(fname, content):
|
||||
parts = fname.split('/')
|
||||
integration = parts[2]
|
||||
return 'esphome.components.{}'.format(integration)
|
||||
return f'esphome.components.{integration}'
|
||||
|
||||
|
||||
@lint_content_find_check(relative_py_search_text, include=['esphome/components/*.py'],
|
||||
@ -303,7 +302,7 @@ def lint_relative_py_import(fname):
|
||||
def lint_namespace(fname, content):
|
||||
expected_name = re.match(r'^esphome/components/([^/]+)/.*',
|
||||
fname.replace(os.path.sep, '/')).group(1)
|
||||
search = 'namespace {}'.format(expected_name)
|
||||
search = f'namespace {expected_name}'
|
||||
if search in content:
|
||||
return None
|
||||
return 'Invalid namespace found in C++ file. All integration C++ files should put all ' \
|
||||
@ -380,7 +379,7 @@ for fname in files:
|
||||
run_checks(LINT_POST_CHECKS, 'POST')
|
||||
|
||||
for f, errs in sorted(errors.items()):
|
||||
print("\033[0;32m************* File \033[1;32m{}\033[0m".format(f))
|
||||
print(f"\033[0;32m************* File \033[1;32m{f}\033[0m")
|
||||
for err in errs:
|
||||
print(err)
|
||||
print()
|
||||
|
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
|
@ -12,11 +12,11 @@ temp_header_file = os.path.join(root_path, '.temp-clang-tidy.cpp')
|
||||
|
||||
def shlex_quote(s):
|
||||
if not s:
|
||||
return u"''"
|
||||
return "''"
|
||||
if re.search(r'[^\w@%+=:,./-]', s) is None:
|
||||
return s
|
||||
|
||||
return u"'" + s.replace(u"'", u"'\"'\"'") + u"'"
|
||||
return "'" + s.replace("'", "'\"'\"'") + "'"
|
||||
|
||||
|
||||
def build_all_include():
|
||||
@ -29,7 +29,7 @@ def build_all_include():
|
||||
if ext in filetypes:
|
||||
path = os.path.relpath(path, root_path)
|
||||
include_p = path.replace(os.path.sep, '/')
|
||||
headers.append('#include "{}"'.format(include_p))
|
||||
headers.append(f'#include "{include_p}"')
|
||||
headers.sort()
|
||||
headers.append('')
|
||||
content = '\n'.join(headers)
|
||||
@ -47,7 +47,7 @@ def build_compile_commands():
|
||||
gcc_flags = json.load(f)
|
||||
exec_path = gcc_flags['execPath']
|
||||
include_paths = gcc_flags['gccIncludePaths'].split(',')
|
||||
includes = ['-I{}'.format(p) for p in include_paths]
|
||||
includes = [f'-I{p}' for p in include_paths]
|
||||
cpp_flags = gcc_flags['gccDefaultCppFlags'].split(' ')
|
||||
defines = [flag for flag in cpp_flags if flag.startswith('-D')]
|
||||
command = [exec_path]
|
||||
@ -102,7 +102,7 @@ def splitlines_no_ends(string):
|
||||
|
||||
def changed_files():
|
||||
for remote in ('upstream', 'origin'):
|
||||
command = ['git', 'merge-base', '{}/dev'.format(remote), 'HEAD']
|
||||
command = ['git', 'merge-base', f'{remote}/dev', 'HEAD']
|
||||
try:
|
||||
merge_base = splitlines_no_ends(get_output(*command))[0]
|
||||
break
|
||||
@ -124,7 +124,7 @@ def filter_changed(files):
|
||||
if not files:
|
||||
print(" No changed files!")
|
||||
for c in files:
|
||||
print(" {}".format(c))
|
||||
print(f" {c}")
|
||||
return files
|
||||
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
@ -61,7 +61,7 @@ def main():
|
||||
continue
|
||||
file_ = line[0]
|
||||
linno = line[1]
|
||||
msg = (u':'.join(line[3:])).strip()
|
||||
msg = (':'.join(line[3:])).strip()
|
||||
print_error(file_, linno, msg)
|
||||
errors += 1
|
||||
|
||||
@ -74,7 +74,7 @@ def main():
|
||||
continue
|
||||
file_ = line[0]
|
||||
linno = line[1]
|
||||
msg = (u':'.join(line[2:])).strip()
|
||||
msg = (':'.join(line[2:])).strip()
|
||||
print_error(file_, linno, msg)
|
||||
errors += 1
|
||||
|
||||
|
@ -18,7 +18,6 @@ Topic :: Home Automation

[flake8]
max-line-length = 120
builtins = unicode, long, raw_input, basestring
exclude = api_pb2.py

[bdist_wheel]
setup.py
@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
"""esphome setup script."""
from setuptools import setup, find_packages
import os
@ -28,7 +28,6 @@ REQUIRES = [
    'paho-mqtt==1.5.0',
    'colorlog==4.0.2',
    'tornado==5.1.1',
    'typing>=3.6.6;python_version<"3.6"',
    'protobuf==3.11.1',
    'tzlocal==2.0.0',
    'pytz==2019.3',
@ -69,7 +68,7 @@ setup(
    zip_safe=False,
    platforms='any',
    test_suite='tests',
    python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,<4.0',
    python_requires='>=3.6,<4.0',
    install_requires=REQUIRES,
    keywords=['home', 'automation'],
    entry_points={