1
0
mirror of https://github.com/esphome/esphome.git synced 2025-09-01 19:02:18 +01:00
* Add black

Update pre commit

Update pre commit

add empty line

* Format with black
This commit is contained in:
Guillermo Ruffino
2021-03-07 16:03:16 -03:00
committed by GitHub
parent 2b60b0f1fa
commit 69879920eb
398 changed files with 21624 additions and 12644 deletions

View File

@@ -2,12 +2,14 @@
# source: api_options.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
@@ -17,37 +19,38 @@ from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor
DESCRIPTOR = _descriptor.FileDescriptor(
name='api_options.proto',
package='',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x11\x61pi_options.proto\x1a google/protobuf/descriptor.proto\"\x06\n\x04void*F\n\rAPISourceType\x12\x0f\n\x0bSOURCE_BOTH\x10\x00\x12\x11\n\rSOURCE_SERVER\x10\x01\x12\x11\n\rSOURCE_CLIENT\x10\x02:E\n\x16needs_setup_connection\x12\x1e.google.protobuf.MethodOptions\x18\x8e\x08 \x01(\x08:\x04true:C\n\x14needs_authentication\x12\x1e.google.protobuf.MethodOptions\x18\x8f\x08 \x01(\x08:\x04true:/\n\x02id\x12\x1f.google.protobuf.MessageOptions\x18\x8c\x08 \x01(\r:\x01\x30:M\n\x06source\x12\x1f.google.protobuf.MessageOptions\x18\x8d\x08 \x01(\x0e\x32\x0e.APISourceType:\x0bSOURCE_BOTH:/\n\x05ifdef\x12\x1f.google.protobuf.MessageOptions\x18\x8e\x08 \x01(\t:3\n\x03log\x12\x1f.google.protobuf.MessageOptions\x18\x8f\x08 \x01(\x08:\x04true:9\n\x08no_delay\x12\x1f.google.protobuf.MessageOptions\x18\x90\x08 \x01(\x08:\x05\x66\x61lse')
,
dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
name="api_options.proto",
package="",
syntax="proto2",
serialized_options=None,
serialized_pb=_b(
'\n\x11\x61pi_options.proto\x1a google/protobuf/descriptor.proto"\x06\n\x04void*F\n\rAPISourceType\x12\x0f\n\x0bSOURCE_BOTH\x10\x00\x12\x11\n\rSOURCE_SERVER\x10\x01\x12\x11\n\rSOURCE_CLIENT\x10\x02:E\n\x16needs_setup_connection\x12\x1e.google.protobuf.MethodOptions\x18\x8e\x08 \x01(\x08:\x04true:C\n\x14needs_authentication\x12\x1e.google.protobuf.MethodOptions\x18\x8f\x08 \x01(\x08:\x04true:/\n\x02id\x12\x1f.google.protobuf.MessageOptions\x18\x8c\x08 \x01(\r:\x01\x30:M\n\x06source\x12\x1f.google.protobuf.MessageOptions\x18\x8d\x08 \x01(\x0e\x32\x0e.APISourceType:\x0bSOURCE_BOTH:/\n\x05ifdef\x12\x1f.google.protobuf.MessageOptions\x18\x8e\x08 \x01(\t:3\n\x03log\x12\x1f.google.protobuf.MessageOptions\x18\x8f\x08 \x01(\x08:\x04true:9\n\x08no_delay\x12\x1f.google.protobuf.MessageOptions\x18\x90\x08 \x01(\x08:\x05\x66\x61lse'
),
dependencies=[
google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,
],
)
_APISOURCETYPE = _descriptor.EnumDescriptor(
name='APISourceType',
full_name='APISourceType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='SOURCE_BOTH', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SOURCE_SERVER', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SOURCE_CLIENT', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=63,
serialized_end=133,
name="APISourceType",
full_name="APISourceType",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="SOURCE_BOTH", index=0, number=0, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="SOURCE_SERVER", index=1, number=1, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="SOURCE_CLIENT", index=2, number=2, serialized_options=None, type=None
),
],
containing_type=None,
serialized_options=None,
serialized_start=63,
serialized_end=133,
)
_sym_db.RegisterEnumDescriptor(_APISOURCETYPE)
@@ -58,105 +61,186 @@ SOURCE_CLIENT = 2
NEEDS_SETUP_CONNECTION_FIELD_NUMBER = 1038
needs_setup_connection = _descriptor.FieldDescriptor(
name='needs_setup_connection', full_name='needs_setup_connection', index=0,
number=1038, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
name="needs_setup_connection",
full_name="needs_setup_connection",
index=0,
number=1038,
type=8,
cpp_type=7,
label=1,
has_default_value=True,
default_value=True,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=True,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
NEEDS_AUTHENTICATION_FIELD_NUMBER = 1039
needs_authentication = _descriptor.FieldDescriptor(
name='needs_authentication', full_name='needs_authentication', index=1,
number=1039, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
name="needs_authentication",
full_name="needs_authentication",
index=1,
number=1039,
type=8,
cpp_type=7,
label=1,
has_default_value=True,
default_value=True,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=True,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
ID_FIELD_NUMBER = 1036
id = _descriptor.FieldDescriptor(
name='id', full_name='id', index=2,
number=1036, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
name="id",
full_name="id",
index=2,
number=1036,
type=13,
cpp_type=3,
label=1,
has_default_value=True,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=True,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
SOURCE_FIELD_NUMBER = 1037
source = _descriptor.FieldDescriptor(
name='source', full_name='source', index=3,
number=1037, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
name="source",
full_name="source",
index=3,
number=1037,
type=14,
cpp_type=8,
label=1,
has_default_value=True,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=True,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
IFDEF_FIELD_NUMBER = 1038
ifdef = _descriptor.FieldDescriptor(
name='ifdef', full_name='ifdef', index=4,
number=1038, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
name="ifdef",
full_name="ifdef",
index=4,
number=1038,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=True,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
LOG_FIELD_NUMBER = 1039
log = _descriptor.FieldDescriptor(
name='log', full_name='log', index=5,
number=1039, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
name="log",
full_name="log",
index=5,
number=1039,
type=8,
cpp_type=7,
label=1,
has_default_value=True,
default_value=True,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=True,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
NO_DELAY_FIELD_NUMBER = 1040
no_delay = _descriptor.FieldDescriptor(
name='no_delay', full_name='no_delay', index=6,
number=1040, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR)
name="no_delay",
full_name="no_delay",
index=6,
number=1040,
type=8,
cpp_type=7,
label=1,
has_default_value=True,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=True,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
_VOID = _descriptor.Descriptor(
name='void',
full_name='void',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=55,
serialized_end=61,
name="void",
full_name="void",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto2",
extension_ranges=[],
oneofs=[],
serialized_start=55,
serialized_end=61,
)
DESCRIPTOR.message_types_by_name['void'] = _VOID
DESCRIPTOR.enum_types_by_name['APISourceType'] = _APISOURCETYPE
DESCRIPTOR.extensions_by_name['needs_setup_connection'] = needs_setup_connection
DESCRIPTOR.extensions_by_name['needs_authentication'] = needs_authentication
DESCRIPTOR.extensions_by_name['id'] = id
DESCRIPTOR.extensions_by_name['source'] = source
DESCRIPTOR.extensions_by_name['ifdef'] = ifdef
DESCRIPTOR.extensions_by_name['log'] = log
DESCRIPTOR.extensions_by_name['no_delay'] = no_delay
DESCRIPTOR.message_types_by_name["void"] = _VOID
DESCRIPTOR.enum_types_by_name["APISourceType"] = _APISOURCETYPE
DESCRIPTOR.extensions_by_name["needs_setup_connection"] = needs_setup_connection
DESCRIPTOR.extensions_by_name["needs_authentication"] = needs_authentication
DESCRIPTOR.extensions_by_name["id"] = id
DESCRIPTOR.extensions_by_name["source"] = source
DESCRIPTOR.extensions_by_name["ifdef"] = ifdef
DESCRIPTOR.extensions_by_name["log"] = log
DESCRIPTOR.extensions_by_name["no_delay"] = no_delay
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
void = _reflection.GeneratedProtocolMessageType('void', (_message.Message,), dict(
DESCRIPTOR = _VOID,
__module__ = 'api_options_pb2'
# @@protoc_insertion_point(class_scope:void)
))
void = _reflection.GeneratedProtocolMessageType(
"void",
(_message.Message,),
dict(
DESCRIPTOR=_VOID,
__module__="api_options_pb2"
# @@protoc_insertion_point(class_scope:void)
),
)
_sym_db.RegisterMessage(void)
google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(needs_setup_connection)
google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(needs_authentication)
google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(
needs_setup_connection
)
google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(
needs_authentication
)
google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(id)
source.enum_type = _APISOURCETYPE
google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(source)

File diff suppressed because it is too large Load Diff

View File

@@ -9,13 +9,14 @@ from esphome.config import get_component, get_platform
from esphome.core import CORE
parser = argparse.ArgumentParser()
parser.add_argument('--check', help="Check if the CODEOWNERS file is up to date.",
action='store_true')
parser.add_argument(
"--check", help="Check if the CODEOWNERS file is up to date.", action="store_true"
)
args = parser.parse_args()
# The root directory of the repo
root = Path(__file__).parent.parent
components_dir = root / 'esphome' / 'components'
components_dir = root / "esphome" / "components"
BASE = """
# This file is generated by script/build_codeowners.py
@@ -43,16 +44,18 @@ codeowners = defaultdict(list)
for path in components_dir.iterdir():
if not path.is_dir():
continue
if not (path / '__init__.py').is_file():
if not (path / "__init__.py").is_file():
continue
name = path.name
comp = get_component(name)
if comp is None:
print(f'Cannot find component {name}. Make sure current path is pip installed ESPHome')
print(
f"Cannot find component {name}. Make sure current path is pip installed ESPHome"
)
sys.exit(1)
codeowners[f'esphome/components/{name}/*'].extend(comp.codeowners)
codeowners[f"esphome/components/{name}/*"].extend(comp.codeowners)
for platform_path in path.iterdir():
platform_name = platform_path.stem
@@ -62,15 +65,17 @@ for path in components_dir.iterdir():
if platform_path.is_dir():
# Sub foldered platforms get their own line
if not (platform_path / '__init__.py').is_file():
if not (platform_path / "__init__.py").is_file():
continue
codeowners[f'esphome/components/{name}/{platform_name}/*'].extend(platform.codeowners)
codeowners[f"esphome/components/{name}/{platform_name}/*"].extend(
platform.codeowners
)
continue
# Non-subfoldered platforms add to codeowners at component level
if not platform_path.is_file() or platform_path.name == '__init__.py':
if not platform_path.is_file() or platform_path.name == "__init__.py":
continue
codeowners[f'esphome/components/{name}/*'].extend(platform.codeowners)
codeowners[f"esphome/components/{name}/*"].extend(platform.codeowners)
for path, owners in sorted(codeowners.items()):
@@ -78,16 +83,18 @@ for path, owners in sorted(codeowners.items()):
if not owners:
continue
for owner in owners:
if not owner.startswith('@'):
print(f"Codeowner {owner} for integration {path} must start with an '@' symbol!")
if not owner.startswith("@"):
print(
f"Codeowner {owner} for integration {path} must start with an '@' symbol!"
)
sys.exit(1)
parts.append(f"{path} {' '.join(owners)}")
# End newline
parts.append('')
content = '\n'.join(parts)
codeowners_file = root / 'CODEOWNERS'
parts.append("")
content = "\n".join(parts)
codeowners_file = root / "CODEOWNERS"
if args.check:
if codeowners_file.read_text() != content:

View File

@@ -12,5 +12,5 @@ def main():
print("Done.")
if __name__ == '__main__':
if __name__ == "__main__":
main()

View File

@@ -17,36 +17,32 @@ def sub(path, pattern, repl, expected_count=1):
def write_version(version: str):
for p in [
".github/workflows/ci-docker.yml",
".github/workflows/release-dev.yml",
".github/workflows/release.yml"
".github/workflows/ci-docker.yml",
".github/workflows/release-dev.yml",
".github/workflows/release.yml",
]:
sub(
p,
r'base_version=".*"',
f'base_version="{version}"'
)
sub(p, r'base_version=".*"', f'base_version="{version}"')
sub(
"docker/Dockerfile",
r"ARG BUILD_FROM=esphome/esphome-base-amd64:.*",
f"ARG BUILD_FROM=esphome/esphome-base-amd64:{version}"
f"ARG BUILD_FROM=esphome/esphome-base-amd64:{version}",
)
sub(
"docker/Dockerfile.dev",
r"FROM esphome/esphome-base-amd64:.*",
f"FROM esphome/esphome-base-amd64:{version}"
f"FROM esphome/esphome-base-amd64:{version}",
)
sub(
"docker/Dockerfile.lint",
r"FROM esphome/esphome-lint-base:.*",
f"FROM esphome/esphome-lint-base:{version}"
f"FROM esphome/esphome-lint-base:{version}",
)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('new_version', type=str)
parser.add_argument("new_version", type=str)
args = parser.parse_args()
version = args.new_version

View File

@@ -16,30 +16,27 @@ class Version:
dev: bool = False
def __str__(self):
return f'{self.major}.{self.minor}.{self.full_patch}'
return f"{self.major}.{self.minor}.{self.full_patch}"
@property
def full_patch(self):
res = f'{self.patch}'
res = f"{self.patch}"
if self.beta > 0:
res += f'b{self.beta}'
res += f"b{self.beta}"
if self.dev:
res += '-dev'
res += "-dev"
return res
@classmethod
def parse(cls, value):
match = re.match(r'(\d+).(\d+).(\d+)(b\d+)?(-dev)?', value)
match = re.match(r"(\d+).(\d+).(\d+)(b\d+)?(-dev)?", value)
assert match is not None
major = int(match[1])
minor = int(match[2])
patch = int(match[3])
beta = int(match[4][1:]) if match[4] else 0
dev = bool(match[5])
return Version(
major=major, minor=minor, patch=patch,
beta=beta, dev=dev
)
return Version(major=major, minor=minor, patch=patch, beta=beta, dev=dev)
def sub(path, pattern, repl, expected_count=1):
@@ -54,25 +51,21 @@ def sub(path, pattern, repl, expected_count=1):
def write_version(version: Version):
sub(
'esphome/const.py',
r"^MAJOR_VERSION = \d+$",
f"MAJOR_VERSION = {version.major}"
"esphome/const.py", r"^MAJOR_VERSION = \d+$", f"MAJOR_VERSION = {version.major}"
)
sub(
'esphome/const.py',
r"^MINOR_VERSION = \d+$",
f"MINOR_VERSION = {version.minor}"
"esphome/const.py", r"^MINOR_VERSION = \d+$", f"MINOR_VERSION = {version.minor}"
)
sub(
'esphome/const.py',
"esphome/const.py",
r"^PATCH_VERSION = .*$",
f"PATCH_VERSION = '{version.full_patch}'"
f"PATCH_VERSION = '{version.full_patch}'",
)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('new_version', type=str)
parser.add_argument("new_version", type=str)
args = parser.parse_args()
version = Version.parse(args.new_version)

View File

@@ -14,6 +14,7 @@ import argparse
sys.path.append(os.path.dirname(__file__))
from helpers import git_ls_files, filter_changed
def find_all(a_str, sub):
if not a_str.find(sub):
# Optimization: If str is not in whole text, then do not try
@@ -30,18 +31,21 @@ def find_all(a_str, sub):
parser = argparse.ArgumentParser()
parser.add_argument('files', nargs='*', default=[],
help='files to be processed (regex on path)')
parser.add_argument('-c', '--changed', action='store_true',
help='Only run on changed files')
parser.add_argument('--print-slowest', action='store_true',
help='Print the slowest checks')
parser.add_argument(
"files", nargs="*", default=[], help="files to be processed (regex on path)"
)
parser.add_argument(
"-c", "--changed", action="store_true", help="Only run on changed files"
)
parser.add_argument(
"--print-slowest", action="store_true", help="Print the slowest checks"
)
args = parser.parse_args()
EXECUTABLE_BIT = git_ls_files()
files = list(EXECUTABLE_BIT.keys())
# Match against re
file_name_re = re.compile('|'.join(args.files))
file_name_re = re.compile("|".join(args.files))
files = [p for p in files if file_name_re.search(p)]
if args.changed:
@@ -49,11 +53,32 @@ if args.changed:
files.sort()
file_types = ('.h', '.c', '.cpp', '.tcc', '.yaml', '.yml', '.ini', '.txt', '.ico', '.svg',
'.py', '.html', '.js', '.md', '.sh', '.css', '.proto', '.conf', '.cfg',
'.woff', '.woff2', '')
cpp_include = ('*.h', '*.c', '*.cpp', '*.tcc')
ignore_types = ('.ico', '.woff', '.woff2', '')
file_types = (
".h",
".c",
".cpp",
".tcc",
".yaml",
".yml",
".ini",
".txt",
".ico",
".svg",
".py",
".html",
".js",
".md",
".sh",
".css",
".proto",
".conf",
".cfg",
".woff",
".woff2",
"",
)
cpp_include = ("*.h", "*.c", "*.cpp", "*.tcc")
ignore_types = (".ico", ".woff", ".woff2", "")
LINT_FILE_CHECKS = []
LINT_CONTENT_CHECKS = []
@@ -61,9 +86,9 @@ LINT_POST_CHECKS = []
def run_check(lint_obj, fname, *args):
include = lint_obj['include']
exclude = lint_obj['exclude']
func = lint_obj['func']
include = lint_obj["include"]
exclude = lint_obj["exclude"]
func = lint_obj["func"]
if include is not None:
for incl in include:
if fnmatch.fnmatch(fname, incl):
@@ -85,21 +110,24 @@ def run_checks(lints, fname, *args):
print(f"Check {lint['func'].__name__} on file {fname} failed:")
raise
duration = time.process_time() - start
lint.setdefault('durations', []).append(duration)
lint.setdefault("durations", []).append(duration)
def _add_check(checks, func, include=None, exclude=None):
checks.append({
'include': include,
'exclude': exclude or [],
'func': func,
})
checks.append(
{
"include": include,
"exclude": exclude or [],
"func": func,
}
)
def lint_file_check(**kwargs):
def decorator(func):
_add_check(LINT_FILE_CHECKS, func, **kwargs)
return func
return decorator
@@ -107,6 +135,7 @@ def lint_content_check(**kwargs):
def decorator(func):
_add_check(LINT_CONTENT_CHECKS, func, **kwargs)
return func
return decorator
@@ -116,7 +145,7 @@ def lint_post_check(func):
def lint_re_check(regex, **kwargs):
flags = kwargs.pop('flags', re.MULTILINE)
flags = kwargs.pop("flags", re.MULTILINE)
prog = re.compile(regex, flags)
decor = lint_content_check(**kwargs)
@@ -125,18 +154,19 @@ def lint_re_check(regex, **kwargs):
def new_func(fname, content):
errors = []
for match in prog.finditer(content):
if 'NOLINT' in match.group(0):
if "NOLINT" in match.group(0):
continue
lineno = content.count("\n", 0, match.start()) + 1
substr = content[:match.start()]
col = len(substr) - substr.rfind('\n')
substr = content[: match.start()]
col = len(substr) - substr.rfind("\n")
err = func(fname, match)
if err is None:
continue
errors.append((lineno, col+1, err))
errors.append((lineno, col + 1, err))
return errors
return decor(new_func)
return decorator
@@ -152,73 +182,99 @@ def lint_content_find_check(find, **kwargs):
errors = []
for line, col in find_all(content, find_):
err = func(fname)
errors.append((line+1, col+1, err))
errors.append((line + 1, col + 1, err))
return errors
return decor(new_func)
return decorator
@lint_file_check(include=['*.ino'])
@lint_file_check(include=["*.ino"])
def lint_ino(fname):
return "This file extension (.ino) is not allowed. Please use either .cpp or .h"
@lint_file_check(exclude=[f'*{f}' for f in file_types] + [
'.clang-*', '.dockerignore', '.editorconfig', '*.gitignore', 'LICENSE', 'pylintrc',
'MANIFEST.in', 'docker/Dockerfile*', 'docker/rootfs/*', 'script/*',
])
@lint_file_check(
exclude=[f"*{f}" for f in file_types]
+ [
".clang-*",
".dockerignore",
".editorconfig",
"*.gitignore",
"LICENSE",
"pylintrc",
"MANIFEST.in",
"docker/Dockerfile*",
"docker/rootfs/*",
"script/*",
]
)
def lint_ext_check(fname):
return "This file extension is not a registered file type. If this is an error, please " \
"update the script/ci-custom.py script."
return (
"This file extension is not a registered file type. If this is an error, please "
"update the script/ci-custom.py script."
)
@lint_file_check(exclude=[
'docker/rootfs/*', 'script/*', 'setup.py'
])
@lint_file_check(exclude=["docker/rootfs/*", "script/*", "setup.py"])
def lint_executable_bit(fname):
ex = EXECUTABLE_BIT[fname]
if ex != 100644:
return 'File has invalid executable bit {}. If running from a windows machine please ' \
'see disabling executable bit in git.'.format(ex)
return (
"File has invalid executable bit {}. If running from a windows machine please "
"see disabling executable bit in git.".format(ex)
)
return None
@lint_content_find_check('\t', exclude=[
'esphome/dashboard/static/ace.js', 'esphome/dashboard/static/ext-searchbox.js',
])
@lint_content_find_check(
"\t",
exclude=[
"esphome/dashboard/static/ace.js",
"esphome/dashboard/static/ext-searchbox.js",
],
)
def lint_tabs(fname):
return "File contains tab character. Please convert tabs to spaces."
@lint_content_find_check('\r')
@lint_content_find_check("\r")
def lint_newline(fname):
return "File contains windows newline. Please set your editor to unix newline mode."
@lint_content_check(exclude=['*.svg'])
@lint_content_check(exclude=["*.svg"])
def lint_end_newline(fname, content):
if content and not content.endswith('\n'):
if content and not content.endswith("\n"):
return "File does not end with a newline, please add an empty line at the end of the file."
return None
CPP_RE_EOL = r'\s*?(?://.*?)?$'
CPP_RE_EOL = r"\s*?(?://.*?)?$"
def highlight(s):
return f'\033[36m{s}\033[0m'
return f"\033[36m{s}\033[0m"
@lint_re_check(r'^#define\s+([a-zA-Z0-9_]+)\s+([0-9bx]+)' + CPP_RE_EOL,
include=cpp_include, exclude=['esphome/core/log.h'])
@lint_re_check(
r"^#define\s+([a-zA-Z0-9_]+)\s+([0-9bx]+)" + CPP_RE_EOL,
include=cpp_include,
exclude=["esphome/core/log.h"],
)
def lint_no_defines(fname, match):
s = highlight('static const uint8_t {} = {};'.format(match.group(1), match.group(2)))
return ("#define macros for integer constants are not allowed, please use "
"{} style instead (replace uint8_t with the appropriate "
"datatype). See also Google style guide.".format(s))
s = highlight(
"static const uint8_t {} = {};".format(match.group(1), match.group(2))
)
return (
"#define macros for integer constants are not allowed, please use "
"{} style instead (replace uint8_t with the appropriate "
"datatype). See also Google style guide.".format(s)
)
@lint_re_check(r'^\s*delay\((\d+)\);' + CPP_RE_EOL, include=cpp_include)
@lint_re_check(r"^\s*delay\((\d+)\);" + CPP_RE_EOL, include=cpp_include)
def lint_no_long_delays(fname, match):
duration_ms = int(match.group(1))
if duration_ms < 50:
@@ -232,7 +288,7 @@ def lint_no_long_delays(fname, match):
)
@lint_content_check(include=['esphome/const.py'])
@lint_content_check(include=["esphome/const.py"])
def lint_const_ordered(fname, content):
"""Lint that value in const.py are ordered.
@@ -240,54 +296,67 @@ def lint_const_ordered(fname, content):
"""
lines = content.splitlines()
errors = []
for start in ['CONF_', 'ICON_', 'UNIT_']:
matching = [(i+1, line) for i, line in enumerate(lines) if line.startswith(start)]
ordered = list(sorted(matching, key=lambda x: x[1].replace('_', ' ')))
for start in ["CONF_", "ICON_", "UNIT_"]:
matching = [
(i + 1, line) for i, line in enumerate(lines) if line.startswith(start)
]
ordered = list(sorted(matching, key=lambda x: x[1].replace("_", " ")))
ordered = [(mi, ol) for (mi, _), (_, ol) in zip(matching, ordered)]
for (mi, ml), (oi, ol) in zip(matching, ordered):
if ml == ol:
continue
target = next(i for i, l in ordered if l == ml)
target_text = next(l for i, l in matching if target == i)
errors.append((mi, 1,
f"Constant {highlight(ml)} is not ordered, please make sure all "
f"constants are ordered. See line {mi} (should go to line {target}, "
f"{target_text})"))
errors.append(
(
mi,
1,
f"Constant {highlight(ml)} is not ordered, please make sure all "
f"constants are ordered. See line {mi} (should go to line {target}, "
f"{target_text})",
)
)
return errors
@lint_re_check(r'^\s*CONF_([A-Z_0-9a-z]+)\s+=\s+[\'"](.*?)[\'"]\s*?$', include=['*.py'])
@lint_re_check(r'^\s*CONF_([A-Z_0-9a-z]+)\s+=\s+[\'"](.*?)[\'"]\s*?$', include=["*.py"])
def lint_conf_matches(fname, match):
const = match.group(1)
value = match.group(2)
const_norm = const.lower()
value_norm = value.replace('.', '_')
value_norm = value.replace(".", "_")
if const_norm == value_norm:
return None
return ("Constant {} does not match value {}! Please make sure the constant's name matches its "
"value!"
"".format(highlight('CONF_' + const), highlight(value)))
return (
"Constant {} does not match value {}! Please make sure the constant's name matches its "
"value!"
"".format(highlight("CONF_" + const), highlight(value))
)
CONF_RE = r'^(CONF_[a-zA-Z0-9_]+)\s*=\s*[\'"].*?[\'"]\s*?$'
with codecs.open('esphome/const.py', 'r', encoding='utf-8') as f_handle:
with codecs.open("esphome/const.py", "r", encoding="utf-8") as f_handle:
constants_content = f_handle.read()
CONSTANTS = [m.group(1) for m in re.finditer(CONF_RE, constants_content, re.MULTILINE)]
CONSTANTS_USES = collections.defaultdict(list)
@lint_re_check(CONF_RE, include=['*.py'], exclude=['esphome/const.py'])
@lint_re_check(CONF_RE, include=["*.py"], exclude=["esphome/const.py"])
def lint_conf_from_const_py(fname, match):
name = match.group(1)
if name not in CONSTANTS:
CONSTANTS_USES[name].append(fname)
return None
return ("Constant {} has already been defined in const.py - please import the constant from "
"const.py directly.".format(highlight(name)))
return (
"Constant {} has already been defined in const.py - please import the constant from "
"const.py directly.".format(highlight(name))
)
RAW_PIN_ACCESS_RE = r'^\s(pinMode|digitalWrite|digitalRead)\((.*)->get_pin\(\),\s*([^)]+).*\)'
RAW_PIN_ACCESS_RE = (
r"^\s(pinMode|digitalWrite|digitalRead)\((.*)->get_pin\(\),\s*([^)]+).*\)"
)
@lint_re_check(RAW_PIN_ACCESS_RE, include=cpp_include)
@@ -296,33 +365,49 @@ def lint_no_raw_pin_access(fname, match):
pin = match.group(2)
mode = match.group(3)
new_func = {
'pinMode': 'pin_mode',
'digitalWrite': 'digital_write',
'digitalRead': 'digital_read',
"pinMode": "pin_mode",
"digitalWrite": "digital_write",
"digitalRead": "digital_read",
}[func]
new_code = highlight(f'{pin}->{new_func}({mode})')
return (f"Don't use raw {func} calls. Instead, use the `->{new_func}` function: {new_code}")
new_code = highlight(f"{pin}->{new_func}({mode})")
return f"Don't use raw {func} calls. Instead, use the `->{new_func}` function: {new_code}"
# Functions from Arduino framework that are forbidden to use directly
ARDUINO_FORBIDDEN = [
'digitalWrite', 'digitalRead', 'pinMode',
'shiftOut', 'shiftIn',
'radians', 'degrees',
'interrupts', 'noInterrupts',
'lowByte', 'highByte',
'bitRead', 'bitSet', 'bitClear', 'bitWrite',
'bit', 'analogRead', 'analogWrite',
'pulseIn', 'pulseInLong',
'tone',
"digitalWrite",
"digitalRead",
"pinMode",
"shiftOut",
"shiftIn",
"radians",
"degrees",
"interrupts",
"noInterrupts",
"lowByte",
"highByte",
"bitRead",
"bitSet",
"bitClear",
"bitWrite",
"bit",
"analogRead",
"analogWrite",
"pulseIn",
"pulseInLong",
"tone",
]
ARDUINO_FORBIDDEN_RE = r'[^\w\d](' + r'|'.join(ARDUINO_FORBIDDEN) + r')\(.*'
ARDUINO_FORBIDDEN_RE = r"[^\w\d](" + r"|".join(ARDUINO_FORBIDDEN) + r")\(.*"
@lint_re_check(ARDUINO_FORBIDDEN_RE, include=cpp_include, exclude=[
'esphome/components/mqtt/custom_mqtt_device.h',
'esphome/core/esphal.*',
])
@lint_re_check(
ARDUINO_FORBIDDEN_RE,
include=cpp_include,
exclude=[
"esphome/components/mqtt/custom_mqtt_device.h",
"esphome/core/esphal.*",
],
)
def lint_no_arduino_framework_functions(fname, match):
nolint = highlight("// NOLINT")
return (
@@ -334,9 +419,13 @@ def lint_no_arduino_framework_functions(fname, match):
)
@lint_re_check(r'[^\w\d]byte\s+[\w\d]+\s*=', include=cpp_include, exclude={
'esphome/components/tuya/tuya.h',
})
@lint_re_check(
r"[^\w\d]byte\s+[\w\d]+\s*=",
include=cpp_include,
exclude={
"esphome/components/tuya/tuya.h",
},
)
def lint_no_byte_datatype(fname, match):
return (
f"The datatype {highlight('byte')} is not allowed to be used in ESPHome. "
@@ -350,112 +439,143 @@ def lint_constants_usage():
for constant, uses in CONSTANTS_USES.items():
if len(uses) < 4:
continue
errors.append("Constant {} is defined in {} files. Please move all definitions of the "
"constant to const.py (Uses: {})"
"".format(highlight(constant), len(uses), ', '.join(uses)))
errors.append(
"Constant {} is defined in {} files. Please move all definitions of the "
"constant to const.py (Uses: {})"
"".format(highlight(constant), len(uses), ", ".join(uses))
)
return errors
def relative_cpp_search_text(fname, content):
parts = fname.split('/')
parts = fname.split("/")
integration = parts[2]
return f'#include "esphome/components/{integration}'
@lint_content_find_check(relative_cpp_search_text, include=['esphome/components/*.cpp'])
@lint_content_find_check(relative_cpp_search_text, include=["esphome/components/*.cpp"])
def lint_relative_cpp_import(fname):
return ("Component contains absolute import - Components must always use "
"relative imports.\n"
"Change:\n"
' #include "esphome/components/abc/abc.h"\n'
'to:\n'
' #include "abc.h"\n\n')
return (
"Component contains absolute import - Components must always use "
"relative imports.\n"
"Change:\n"
' #include "esphome/components/abc/abc.h"\n'
"to:\n"
' #include "abc.h"\n\n'
)
def relative_py_search_text(fname, content):
parts = fname.split('/')
parts = fname.split("/")
integration = parts[2]
return f'esphome.components.{integration}'
return f"esphome.components.{integration}"
@lint_content_find_check(relative_py_search_text, include=['esphome/components/*.py'],
exclude=['esphome/components/web_server/__init__.py'])
@lint_content_find_check(
relative_py_search_text,
include=["esphome/components/*.py"],
exclude=["esphome/components/web_server/__init__.py"],
)
def lint_relative_py_import(fname):
return ("Component contains absolute import - Components must always use "
"relative imports within the integration.\n"
"Change:\n"
' from esphome.components.abc import abc_ns"\n'
'to:\n'
' from . import abc_ns\n\n')
return (
"Component contains absolute import - Components must always use "
"relative imports within the integration.\n"
"Change:\n"
' from esphome.components.abc import abc_ns"\n'
"to:\n"
" from . import abc_ns\n\n"
)
@lint_content_check(
    include=[
        "esphome/components/*.h",
        "esphome/components/*.cpp",
        "esphome/components/*.tcc",
    ]
)
def lint_namespace(fname, content):
    """Check that a component C++ file declares a namespace matching its
    integration directory name.

    Returns None when ``namespace <integration>`` is found in *content*,
    otherwise an error message.
    """
    # Integration name is the first path segment after esphome/components/.
    expected_name = re.match(
        r"^esphome/components/([^/]+)/.*", fname.replace(os.path.sep, "/")
    ).group(1)
    search = f"namespace {expected_name}"
    if search in content:
        return None
    return (
        "Invalid namespace found in C++ file. All integration C++ files should put all "
        "functions in a separate namespace that matches the integration's name. "
        "Please make sure the file contains {}".format(highlight(search))
    )
@lint_content_find_check('"esphome.h"', include=cpp_include, exclude=["tests/custom.h"])
def lint_esphome_h(fname):
    """Error message for files referencing the auto-generated umbrella header
    ``esphome.h``, which is reserved for *custom* components."""
    return (
        "File contains reference to 'esphome.h' - This file is "
        "auto-generated and should only be used for *custom* "
        "components. Please replace with references to the direct files."
    )
@lint_content_check(include=["*.h"])
def lint_pragma_once(fname, content):
    """Check that every header file carries a ``#pragma once`` include guard.

    Returns an error message when the guard is missing, None otherwise.
    """
    if "#pragma once" not in content:
        return (
            "Header file contains no 'pragma once' header guard. Please add a "
            "'#pragma once' line at the top of the file."
        )
    return None
@lint_re_check(
    r"(whitelist|blacklist|slave)",
    exclude=["script/ci-custom.py"],
    flags=re.IGNORECASE | re.MULTILINE,
)
def lint_inclusive_language(fname, match):
    """Suggest inclusive replacements for non-inclusive terminology."""
    # From https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/commit/?id=49decddd39e5f6132ccd7d9fdc3d7c470b0061bb
    return (
        "Avoid the use of whitelist/blacklist/slave.\n"
        "Recommended replacements for 'master / slave' are:\n"
        "    '{primary,main} / {secondary,replica,subordinate}\n"
        "    '{initiator,requester} / {target,responder}'\n"
        "    '{controller,host} / {device,worker,proxy}'\n"
        "    'leader / follower'\n"
        "    'director / performer'\n"
        "\n"
        "Recommended replacements for 'blacklist/whitelist' are:\n"
        "    'denylist / allowlist'\n"
        "    'blocklist / passlist'"
    )
@lint_content_find_check(
    "ESP_LOG",
    include=["*.h", "*.tcc"],
    # Core/base-class headers legitimately declare logging helpers and are
    # excluded from this check.
    exclude=[
        "esphome/components/binary_sensor/binary_sensor.h",
        "esphome/components/cover/cover.h",
        "esphome/components/display/display_buffer.h",
        "esphome/components/i2c/i2c.h",
        "esphome/components/mqtt/mqtt_component.h",
        "esphome/components/output/binary_output.h",
        "esphome/components/output/float_output.h",
        "esphome/components/sensor/sensor.h",
        "esphome/components/stepper/stepper.h",
        "esphome/components/switch/switch.h",
        "esphome/components/text_sensor/text_sensor.h",
        "esphome/components/climate/climate.h",
        "esphome/core/component.h",
        "esphome/core/esphal.h",
        "esphome/core/log.h",
        "tests/custom.h",
    ],
)
def lint_log_in_header(fname):
    """Error message for ESP_LOG* usage in header files; logging calls must
    live in .cpp source files."""
    return (
        "Found reference to ESP_LOG in header file. Using ESP_LOG* in header files "
        "is currently not possible - please move the definition to a source file (.cpp)"
    )
# Collected lint failures, keyed by filename -> list of error messages
# (populated via add_errors() and printed at the end of the run).
errors = collections.defaultdict(list)
@@ -488,14 +608,17 @@ for fname in files:
if ext in ignore_types:
continue
try:
with codecs.open(fname, 'r', encoding='utf-8') as f_handle:
with codecs.open(fname, "r", encoding="utf-8") as f_handle:
content = f_handle.read()
except UnicodeDecodeError:
add_errors(fname, "File is not readable as UTF-8. Please set your editor to UTF-8 mode.")
add_errors(
fname,
"File is not readable as UTF-8. Please set your editor to UTF-8 mode.",
)
continue
run_checks(LINT_CONTENT_CHECKS, fname, fname, content)
run_checks(LINT_POST_CHECKS, 'POST')
run_checks(LINT_POST_CHECKS, "POST")
for f, errs in sorted(errors.items()):
print(f"\033[0;32m************* File \033[1;32m{f}\033[0m")
@@ -506,8 +629,8 @@ for f, errs in sorted(errors.items()):
if args.print_slowest:
lint_times = []
for lint in LINT_FILE_CHECKS + LINT_CONTENT_CHECKS + LINT_POST_CHECKS:
durations = lint.get('durations', [])
lint_times.append((sum(durations), len(durations), lint['func'].__name__))
durations = lint.get("durations", [])
lint_times.append((sum(durations), len(durations), lint["func"].__name__))
lint_times.sort(key=lambda x: -x[0])
for i in range(min(len(lint_times), 10)):
dur, invocations, name = lint_times[i]

View File

@@ -5,15 +5,15 @@ import re
import subprocess
import sys
# Repository layout constants for the clang-tidy helper:
# root_path is the repository root (two levels above this script),
# basepath the esphome/ source tree, and temp_header_file a generated
# all-includes translation unit used to compile header-only code.
root_path = os.path.abspath(os.path.normpath(os.path.join(__file__, "..", "..")))
basepath = os.path.join(root_path, "esphome")
temp_header_file = os.path.join(root_path, ".temp-clang-tidy.cpp")
def shlex_quote(s):
    """Return a shell-escaped version of string *s*.

    Mirrors the behaviour of shlex.quote(): the empty string becomes ``''``,
    strings containing only safe characters are returned unchanged, and
    anything else is wrapped in single quotes with embedded single quotes
    escaped via the '"'"' trick.
    """
    if not s:
        return "''"
    # Only word characters and a small set of punctuation are shell-safe.
    if re.search(r"[^\w@%+=:,./-]", s) is None:
        return s
    return "'" + s.replace("'", "'\"'\"'") + "'"
@@ -24,63 +24,71 @@ def build_all_include():
# Otherwise header-only integrations would not be tested by clang-tidy
headers = []
for path in walk_files(basepath):
filetypes = ('.h',)
filetypes = (".h",)
ext = os.path.splitext(path)[1]
if ext in filetypes:
path = os.path.relpath(path, root_path)
include_p = path.replace(os.path.sep, '/')
include_p = path.replace(os.path.sep, "/")
headers.append(f'#include "{include_p}"')
headers.sort()
headers.append('')
content = '\n'.join(headers)
with codecs.open(temp_header_file, 'w', encoding='utf-8') as f:
headers.append("")
content = "\n".join(headers)
with codecs.open(temp_header_file, "w", encoding="utf-8") as f:
f.write(content)
def build_compile_commands():
    """Generate compile_commands.json for clang-tidy from PlatformIO's
    .gcc-flags.json.

    Reads compiler path, include paths and preprocessor defines from
    .gcc-flags.json, builds one compile command per .cpp file (plus the
    generated all-includes header), and writes compile_commands.json at the
    repository root. The file is left untouched when its contents would be
    identical, to avoid needless rebuilds.
    """
    gcc_flags_json = os.path.join(root_path, ".gcc-flags.json")
    if not os.path.isfile(gcc_flags_json):
        # Bug fix: the original used an unfilled "{}" placeholder here.
        print(f"Could not find {gcc_flags_json} file which is required for clang-tidy.")
        print(
            'Please run "pio init --ide atom" in the root esphome folder to generate that file.'
        )
        sys.exit(1)
    with codecs.open(gcc_flags_json, "r", encoding="utf-8") as f:
        gcc_flags = json.load(f)
    exec_path = gcc_flags["execPath"]
    include_paths = gcc_flags["gccIncludePaths"].split(",")
    includes = [f"-I{p}" for p in include_paths]
    cpp_flags = gcc_flags["gccDefaultCppFlags"].split(" ")
    defines = [flag for flag in cpp_flags if flag.startswith("-D")]
    command = [exec_path]
    command.extend(includes)
    command.extend(defines)
    command.append("-std=gnu++11")
    command.append("-Wall")
    command.append("-Wno-delete-non-virtual-dtor")
    command.append("-Wno-unused-variable")
    command.append("-Wunreachable-code")
    source_files = []
    for path in walk_files(basepath):
        filetypes = (".cpp",)
        ext = os.path.splitext(path)[1]
        if ext in filetypes:
            source_files.append(os.path.abspath(path))
    # Include the generated all-headers TU so header-only code is analyzed.
    source_files.append(temp_header_file)
    source_files.sort()
    compile_commands = [
        {
            "directory": root_path,
            "command": " ".join(
                shlex_quote(x) for x in (command + ["-o", p + ".o", "-c", p])
            ),
            "file": p,
        }
        for p in source_files
    ]
    compile_commands_json = os.path.join(root_path, "compile_commands.json")
    if os.path.isfile(compile_commands_json):
        with codecs.open(compile_commands_json, "r", encoding="utf-8") as f:
            try:
                # Skip the write (and downstream rebuild) if nothing changed.
                if json.load(f) == compile_commands:
                    return
            # pylint: disable=bare-except
            except:
                pass
    with codecs.open(compile_commands_json, "w", encoding="utf-8") as f:
        json.dump(compile_commands, f, indent=2)
@@ -93,7 +101,13 @@ def walk_files(path):
def get_output(*args):
    """Run *args* as a subprocess and return its decoded stdout.

    Stderr is captured and discarded so it does not pollute the caller's
    output.
    """
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, _ = proc.communicate()
    return output.decode("utf-8")
def get_err(*args):
    """Run *args* as a subprocess and return its decoded stderr.

    Stdout is captured and ignored; only the standard-error stream is
    returned.
    """
    process = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    _stdout, stderr = process.communicate()
    return stderr.decode("utf-8")
def splitlines_no_ends(string):
@@ -101,18 +115,19 @@ def splitlines_no_ends(string):
def changed_files():
check_remotes = ['upstream', 'origin']
check_remotes.extend(splitlines_no_ends(get_output('git', 'remote')))
check_remotes = ["upstream", "origin"]
check_remotes.extend(splitlines_no_ends(get_output("git", "remote")))
for remote in check_remotes:
command = ['git', 'merge-base', f'refs/remotes/{remote}/dev', 'HEAD']
command = ["git", "merge-base", f"refs/remotes/{remote}/dev", "HEAD"]
try:
merge_base = splitlines_no_ends(get_output(*command))[0]
break
# pylint: disable=bare-except
except:
pass
else:
raise ValueError("Git not configured")
command = ['git', 'diff', merge_base, '--name-only']
command = ["git", "diff", merge_base, "--name-only"]
changed = splitlines_no_ends(get_output(*command))
changed = [os.path.relpath(f, os.getcwd()) for f in changed]
changed.sort()
@@ -131,10 +146,8 @@ def filter_changed(files):
def git_ls_files():
    """Return a mapping of git-tracked file path -> file mode.

    Parses ``git ls-files -s`` output, whose fields are
    ``<mode> <object> <stage>\t<path>``.
    NOTE(review): lines are split on any whitespace and field 3 is taken as
    the path, so paths containing spaces would be truncated — confirm this
    is acceptable for the repository's file names.
    """
    command = ["git", "ls-files", "-s"]
    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
    output, _ = proc.communicate()
    lines = [x.split() for x in output.decode("utf-8").splitlines()]
    return {s[3].strip(): int(s[0]) for s in lines}

View File

@@ -1,15 +1,14 @@
#!/usr/bin/env python3
from __future__ import print_function
from helpers import get_output, get_err, git_ls_files, filter_changed
import argparse
import collections
import os
import re
import sys
sys.path.append(os.path.dirname(__file__))
from helpers import get_output, git_ls_files, filter_changed
# Last file for which an error banner was printed; used to avoid repeating
# the filename header for consecutive errors in the same file.
# NOTE(review): inferred from the nearby print_error() usage — confirm.
curfile = None
@@ -22,26 +21,28 @@ def print_error(file, lineno, msg):
print("\033[0;32m************* File \033[1;32m{}\033[0m".format(file))
curfile = file
print(u'{}:{} - {}'.format(file, lineno, msg))
print("{}:{} - {}".format(file, lineno, msg))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('files', nargs='*', default=[],
help='files to be processed (regex on path)')
parser.add_argument('-c', '--changed', action='store_true',
help='Only run on changed files')
parser.add_argument(
"files", nargs="*", default=[], help="files to be processed (regex on path)"
)
parser.add_argument(
"-c", "--changed", action="store_true", help="Only run on changed files"
)
args = parser.parse_args()
files = []
for path in git_ls_files():
filetypes = ('.py',)
filetypes = (".py",)
ext = os.path.splitext(path)[1]
if ext in filetypes and path.startswith('esphome'):
if ext in filetypes and path.startswith("esphome"):
path = os.path.relpath(path, os.getcwd())
files.append(path)
# Match against re
file_name_re = re.compile('|'.join(args.files))
file_name_re = re.compile("|".join(args.files))
files = [p for p in files if file_name_re.search(p)]
if args.changed:
@@ -52,34 +53,45 @@ def main():
sys.exit(0)
errors = 0
cmd = ['flake8'] + files
cmd = ["black", "--verbose", "--check"] + files
print("Running black...")
log = get_err(*cmd)
for line in log.splitlines():
WOULD_REFORMAT = "would reformat"
if line.startswith(WOULD_REFORMAT):
file_ = line[len(WOULD_REFORMAT) + 1 :]
print_error(file_, None, "Please format this file with the black formatter")
errors += 1
cmd = ["flake8"] + files
print("Running flake8...")
log = get_output(*cmd)
for line in log.splitlines():
line = line.split(':', 4)
line = line.split(":", 4)
if len(line) < 4:
continue
file_ = line[0]
linno = line[1]
msg = (':'.join(line[3:])).strip()
msg = (":".join(line[3:])).strip()
print_error(file_, linno, msg)
errors += 1
cmd = ['pylint', '-f', 'parseable', '--persistent=n'] + files
cmd = ["pylint", "-f", "parseable", "--persistent=n"] + files
print("Running pylint...")
log = get_output(*cmd)
for line in log.splitlines():
line = line.split(':', 3)
line = line.split(":", 3)
if len(line) < 3:
continue
file_ = line[0]
linno = line[1]
msg = (':'.join(line[2:])).strip()
msg = (":".join(line[2:])).strip()
print_error(file_, linno, msg)
errors += 1
sys.exit(errors)
# Script entry point: run the lint pipeline only when executed directly,
# not when imported.
if __name__ == "__main__":
    main()

View File

@@ -6,3 +6,5 @@ set -e
# Run from the repository root regardless of where the script is invoked.
cd "$(dirname "$0")/.."

# Install runtime and test dependencies, then ESPHome itself in editable mode.
pip3 install -r requirements.txt -r requirements_test.txt
pip3 install -e .

# Register the repository's pre-commit hooks (e.g. black) with git.
pre-commit install