Mirror of https://github.com/esphome/esphome.git, synced 2025-03-14 06:38:17 +00:00

Merge branch 'dev' into zc01451

Commit 1a0b50a752 by J. Nick Koston, 2025-02-18 11:12:12 -06:00, committed by GitHub
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
5 changed files with 41 additions and 14 deletions

View File

@@ -35,7 +35,7 @@ RUN \
     iputils-ping=3:20221126-1+deb12u1 \
     git=1:2.39.5-0+deb12u1 \
     curl=7.88.1-10+deb12u8 \
-    openssh-client=1:9.2p1-2+deb12u3 \
+    openssh-client=1:9.2p1-2+deb12u4 \
     python3-cffi=1.15.1-5 \
     libcairo2=1.16.0-7 \
     libmagic1=1:5.44-3 \

View File

@@ -1,5 +1,4 @@
 import logging
-import multiprocessing
 import os
 from pathlib import Path
@@ -94,10 +93,19 @@ def valid_project_name(value: str):
     return value
 
 
+def get_usable_cpu_count() -> int:
+    """Return the number of CPUs that can be used for processes.
+    On Python 3.13+ this is the number of CPUs that can be used for processes.
+    On older Python versions this is the number of CPUs.
+    """
+    return (
+        os.process_cpu_count() if hasattr(os, "process_cpu_count") else os.cpu_count()
+    )
+
+
 if "ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT" in os.environ:
     _compile_process_limit_default = min(
-        int(os.environ["ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT"]),
-        multiprocessing.cpu_count(),
+        int(os.environ["ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT"]), get_usable_cpu_count()
     )
 else:
     _compile_process_limit_default = cv.UNDEFINED
@@ -156,7 +164,7 @@ CONFIG_SCHEMA = cv.All(
             ),
             cv.Optional(
                 CONF_COMPILE_PROCESS_LIMIT, default=_compile_process_limit_default
-            ): cv.int_range(min=1, max=multiprocessing.cpu_count()),
+            ): cv.int_range(min=1, max=get_usable_cpu_count()),
         }
     ),
     validate_hostname,
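
Outside the diff, a minimal sketch of what the new default computation does. The UNDEFINED object and the default_compile_process_limit() wrapper are illustrative stand-ins (the real code uses cv.UNDEFINED and module-level assignment); the point is that the environment variable can only lower the default compile-process limit, while get_usable_cpu_count() caps it at the CPUs the process may actually use.

import os


def get_usable_cpu_count() -> int:
    # Same logic the commit adds: process-aware count on Python 3.13+,
    # total CPU count on older interpreters.
    return (
        os.process_cpu_count() if hasattr(os, "process_cpu_count") else os.cpu_count()
    )


UNDEFINED = object()  # hypothetical stand-in for cv.UNDEFINED


def default_compile_process_limit() -> object:
    if "ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT" in os.environ:
        # The env var can only lower the default, never raise it above the
        # number of CPUs usable by this process.
        return min(
            int(os.environ["ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT"]),
            get_usable_cpu_count(),
        )
    return UNDEFINED


os.environ["ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT"] = "2"  # example value only
print(default_compile_process_limit())  # 2 on any machine with at least 2 usable CPUs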

View File

@@ -1,7 +1,6 @@
 #!/usr/bin/env python3
 import argparse
-import multiprocessing
 import os
 import queue
 import re
@@ -11,7 +10,13 @@ import threading
 import click
 import colorama
-from helpers import filter_changed, get_binary, git_ls_files, print_error_for_file
+from helpers import (
+    filter_changed,
+    get_binary,
+    get_usable_cpu_count,
+    git_ls_files,
+    print_error_for_file,
+)
 
 
 def run_format(executable, args, queue, lock, failed_files):
@@ -25,7 +30,9 @@ def run_format(executable, args, queue, lock, failed_files):
             invocation.extend(["--dry-run", "-Werror"])
         invocation.append(path)
 
-        proc = subprocess.run(invocation, capture_output=True, encoding="utf-8")
+        proc = subprocess.run(
+            invocation, capture_output=True, encoding="utf-8", check=False
+        )
         if proc.returncode != 0:
             with lock:
                 print_error_for_file(path, proc.stderr)
@@ -45,7 +52,7 @@ def main():
         "-j",
         "--jobs",
         type=int,
-        default=multiprocessing.cpu_count(),
+        default=get_usable_cpu_count(),
         help="number of format instances to be run in parallel.",
     )
     parser.add_argument(
@@ -80,7 +87,8 @@ def main():
         lock = threading.Lock()
         for _ in range(args.jobs):
             t = threading.Thread(
-                target=run_format, args=(executable, args, task_queue, lock, failed_files)
+                target=run_format,
+                args=(executable, args, task_queue, lock, failed_files),
             )
             t.daemon = True
             t.start()
@@ -95,7 +103,7 @@ def main():
         # Wait for all threads to be done.
         task_queue.join()
 
-    except FileNotFoundError as ex:
+    except FileNotFoundError:
         return 1
     except KeyboardInterrupt:
         print()
@@ -103,7 +111,7 @@ def main():
         # Kill subprocesses (and ourselves!)
         # No simple, clean alternative appears to be available.
        os.kill(0, 9)
-        return 2 # Will not execute.
+        return 2  # Will not execute.
 
     return len(failed_files)
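
For context (not part of the diff), the script fans work out to a small pool of daemon threads that pull file paths from a queue and run the formatter as a subprocess; check=False just makes explicit that failures are detected via proc.returncode rather than via subprocess.CalledProcessError. A self-contained sketch of that pattern, with a placeholder command in place of clang-format:

import queue
import subprocess
import threading

paths = ["a.cpp", "b.cpp"]  # placeholder inputs
task_queue = queue.Queue()
failed_files = []
lock = threading.Lock()


def worker():
    while True:
        path = task_queue.get()
        # check=False: a non-zero exit code is reported through proc.returncode
        # instead of raising subprocess.CalledProcessError.
        proc = subprocess.run(
            ["echo", path], capture_output=True, encoding="utf-8", check=False
        )
        if proc.returncode != 0:
            with lock:
                failed_files.append(path)
        task_queue.task_done()


for _ in range(2):  # args.jobs in the real script
    t = threading.Thread(target=worker)
    t.daemon = True  # daemon threads die with the main thread
    t.start()

for path in paths:
    task_queue.put(path)
task_queue.join()  # wait until every queued path has been processed
print(f"{len(failed_files)} file(s) failed")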

View File

@@ -1,7 +1,6 @@
 #!/usr/bin/env python3
 import argparse
-import multiprocessing
 import os
 import queue
 import re
@@ -19,6 +18,7 @@ from helpers import (
     filter_changed,
     filter_grep,
     get_binary,
+    get_usable_cpu_count,
     git_ls_files,
     load_idedata,
     print_error_for_file,
@@ -170,7 +170,7 @@ def main():
         "-j",
         "--jobs",
         type=int,
-        default=multiprocessing.cpu_count(),
+        default=get_usable_cpu_count(),
         help="number of tidy instances to be run in parallel.",
     )
     parser.add_argument(
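
As in clang-format above, the only behavioural change here is the --jobs default. A tiny standalone sketch (a throwaway parser, not the script itself) showing that the default is computed once, when the argument is declared, and can still be overridden on the command line:

import argparse
import os

# Inline version of get_usable_cpu_count() from this commit.
usable_cpus = (
    os.process_cpu_count() if hasattr(os, "process_cpu_count") else os.cpu_count()
)

parser = argparse.ArgumentParser()
parser.add_argument(
    "-j",
    "--jobs",
    type=int,
    default=usable_cpus,  # evaluated once, at parser construction time
    help="number of tidy instances to be run in parallel.",
)
print(parser.parse_args([]).jobs)           # defaults to the usable CPU count
print(parser.parse_args(["-j", "4"]).jobs)  # explicit override: 4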

View File

@@ -188,3 +188,14 @@ def get_binary(name: str, version: str) -> str:
             """
         )
         raise
+
+
+def get_usable_cpu_count() -> int:
+    """Return the number of CPUs that can be used for processes.
+    On Python 3.13+ this is the number of CPUs that can be used for processes.
+    On older Python versions this is the number of CPUs.
+    """
+    return (
+        os.process_cpu_count() if hasattr(os, "process_cpu_count") else os.cpu_count()
+    )
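
The docstring's distinction only matters on Python 3.13+: os.cpu_count() reports every logical CPU in the machine, while os.process_cpu_count() honours restrictions such as a CPU-affinity mask (common in containers or under taskset). A throwaway, Linux-only sketch, not part of the diff, that makes the difference visible:

import os

# Linux-specific: pin this process to a single CPU, then compare the two
# values get_usable_cpu_count() chooses between.
os.sched_setaffinity(0, {0})

print("os.cpu_count():", os.cpu_count())  # still the machine-wide CPU count
if hasattr(os, "process_cpu_count"):
    # Python 3.13+: respects the affinity mask set above, so this prints 1.
    print("os.process_cpu_count():", os.process_cpu_count())
else:
    # Older interpreters: the helper falls back to the machine-wide count.
    print("fallback os.cpu_count():", os.cpu_count())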