1
0
mirror of https://github.com/esphome/esphome.git synced 2025-03-14 06:38:17 +00:00

Use the process CPU count to determine how many children to create

Python 3.13+ introduced os.process_cpu_count() which is better
aligned with what we want here

https://docs.python.org/3/library/os.html#os.process_cpu_count

Additionally this avoids bringing in multiprocessing, which
increases the startup time

reduction in `importtime-waterfall esphome.__main__`

```
        multiprocessing (82)
          multiprocessing.context (188)
            multiprocessing.process (221)
              signal (195)
            multiprocessing.reduction (116)
              pickle (293)
                struct (45)
                  _struct (209)
                _compat_pickle (101)
                _pickle (241)
              socket (405)
                _socket (320)
                array (205)
```
This commit is contained in:
J. Nick Koston 2025-02-18 10:04:04 -06:00
parent 1257640e48
commit 98517d08a0
No known key found for this signature in database
4 changed files with 36 additions and 13 deletions

View File

@ -1,5 +1,4 @@
import logging
import multiprocessing
import os
from pathlib import Path
@ -94,10 +93,15 @@ def valid_project_name(value: str):
return value
def get_usable_cpu_count() -> int:
    """Return the number of CPUs usable by the current process.

    Prefers os.process_cpu_count() (Python 3.13+), which accounts for the
    process's CPU affinity mask, and falls back to os.cpu_count() on older
    interpreters. Both calls are documented to return None when the count
    cannot be determined, so fall back to 1 to honor the declared int
    return type and keep callers (e.g. min()/cv.int_range) from crashing.
    """
    count = (
        os.process_cpu_count() if hasattr(os, "process_cpu_count") else os.cpu_count()
    )
    return count or 1
if "ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT" in os.environ:
_compile_process_limit_default = min(
int(os.environ["ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT"]),
multiprocessing.cpu_count(),
int(os.environ["ESPHOME_DEFAULT_COMPILE_PROCESS_LIMIT"]), get_usable_cpu_count()
)
else:
_compile_process_limit_default = cv.UNDEFINED
@ -156,7 +160,7 @@ CONFIG_SCHEMA = cv.All(
),
cv.Optional(
CONF_COMPILE_PROCESS_LIMIT, default=_compile_process_limit_default
): cv.int_range(min=1, max=multiprocessing.cpu_count()),
): cv.int_range(min=1, max=get_usable_cpu_count()),
}
),
validate_hostname,

View File

@ -1,7 +1,6 @@
#!/usr/bin/env python3
import argparse
import multiprocessing
import os
import queue
import re
@ -11,7 +10,13 @@ import threading
import click
import colorama
from helpers import filter_changed, get_binary, git_ls_files, print_error_for_file
from helpers import (
filter_changed,
get_binary,
get_usable_cpu_count,
git_ls_files,
print_error_for_file,
)
def run_format(executable, args, queue, lock, failed_files):
@ -25,7 +30,9 @@ def run_format(executable, args, queue, lock, failed_files):
invocation.extend(["--dry-run", "-Werror"])
invocation.append(path)
proc = subprocess.run(invocation, capture_output=True, encoding="utf-8")
proc = subprocess.run(
invocation, capture_output=True, encoding="utf-8", check=False
)
if proc.returncode != 0:
with lock:
print_error_for_file(path, proc.stderr)
@ -45,7 +52,7 @@ def main():
"-j",
"--jobs",
type=int,
default=multiprocessing.cpu_count(),
default=get_usable_cpu_count(),
help="number of format instances to be run in parallel.",
)
parser.add_argument(
@ -80,7 +87,8 @@ def main():
lock = threading.Lock()
for _ in range(args.jobs):
t = threading.Thread(
target=run_format, args=(executable, args, task_queue, lock, failed_files)
target=run_format,
args=(executable, args, task_queue, lock, failed_files),
)
t.daemon = True
t.start()
@ -95,7 +103,7 @@ def main():
# Wait for all threads to be done.
task_queue.join()
except FileNotFoundError as ex:
except FileNotFoundError:
return 1
except KeyboardInterrupt:
print()
@ -103,7 +111,7 @@ def main():
# Kill subprocesses (and ourselves!)
# No simple, clean alternative appears to be available.
os.kill(0, 9)
return 2 # Will not execute.
return 2 # Will not execute.
return len(failed_files)

View File

@ -1,7 +1,6 @@
#!/usr/bin/env python3
import argparse
import multiprocessing
import os
import queue
import re
@ -19,6 +18,7 @@ from helpers import (
filter_changed,
filter_grep,
get_binary,
get_usable_cpu_count,
git_ls_files,
load_idedata,
print_error_for_file,
@ -170,7 +170,7 @@ def main():
"-j",
"--jobs",
type=int,
default=multiprocessing.cpu_count(),
default=get_usable_cpu_count(),
help="number of tidy instances to be run in parallel.",
)
parser.add_argument(

View File

@ -188,3 +188,14 @@ def get_binary(name: str, version: str) -> str:
"""
)
raise
def get_usable_cpu_count() -> int:
    """Return the number of CPUs that can be used for processes.

    On Python 3.13+ this uses os.process_cpu_count(), which respects the
    process's CPU affinity; on older versions it falls back to
    os.cpu_count(). Either call may return None when the count cannot be
    determined, so default to 1 to satisfy the int return annotation.
    """
    count = (
        os.process_cpu_count() if hasattr(os, "process_cpu_count") else os.cpu_count()
    )
    return count or 1